
Commit

Add files via upload
techvaquero authored Feb 14, 2024
1 parent 5723c52 commit 72b0811
Showing 6 changed files with 268 additions and 0 deletions.
44 changes: 44 additions & 0 deletions tmp/beyondsqltest/testlookmlfiles/dos.lkml
@@ -0,0 +1,44 @@
dos = """{{
config(
materialized = 'table'
)
}}

select
convert(string,c_custkey) as stringkey,
c_name,
c_address,
c_nationkey,
c_phone,
dlog10(c_acctbal) as actbalbaseten,
dlog10(c_acctbal) as actbalbaseten,
JSON_EXTRACT_PATH_TEXT('{"f2":{"f3":1},"f4":{"f5":99,"f6":"star"}}','f4', 'f6'),
dexp(100),
date_part(dow, '2008-01-05 14:00:00'),
hll_cardinality(expr),
JSON_ARRAY_LENGTH('[11,12,13,{"f1":21,"f2":[25,26]},14]'),
c_mktsegment,
c_comment,
getdate() as hoy,
GETDATE AS get_date_caps_test,
sysdate() AS sys_date_col_test,
SYSDATE() AS sys_date_caps_col_test,
ISNULL(test, test_is_null) AS null_test_col_caps,
ISNULL(test, test_is_null) AS null_test_col_caps,
isnull(test, 'test_is_null') AS null_test_col,
date_part(year, date(origination_date)) || '-' || 'Q' || floor(
(date_part(month, date(origination_date)) - 1) / 3) + 1 as origination_quarter,
date_part(SECONDS, '2019-10-01 00:00:01.000001'::timestamp),
first_value(
case when colA = 2 then id2
end ignore nulls
) over (
partition by
customer_id
order by
created_at
rows between unbounded preceding and unbounded following
) as test_syntax_change
from
redshift_sample_data.tpch_rs1.customer
ORDER BY colC, colB DESC"""
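
This fixture wraps its SQL in a dbt-style {{ config(materialized = 'table') }} header; the five files below follow the same pattern. A minimal sketch of stripping that header with Jinja2, assuming a no-op config stub (the commit ships no rendering harness, and this stub is not dbt's real macro):

import jinja2

# Short stand-in for a fixture string like dos above.
fixture = '''{{ config(materialized = 'table') }}
select c_name from redshift_sample_data.tpch_rs1.customer'''

# Hypothetical no-op replacement for dbt's config() macro: it swallows the
# keyword arguments so the Jinja expression renders to an empty string.
def config(**kwargs):
    return ""

rendered = jinja2.Template(fixture).render(config=config)
print(rendered.strip())  # select c_name from redshift_sample_data.tpch_rs1.customer
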
44 changes: 44 additions & 0 deletions tmp/beyondsqltest/testlookmlfiles/tres.lkml
@@ -0,0 +1,44 @@
tres = """{{
config(
materialized = 'table'
)
}}

select
convert(string,c_custkey) as stringkey,
c_name,
c_address,
c_nationkey,
c_phone,
dlog10(c_acctbal) as actbalbaseten,
dlog10(c_acctbal) as actbalbaseten,
JSON_EXTRACT_PATH_TEXT('{"f2":{"f3":1},"f4":{"f5":99,"f6":"star"}}','f4', 'f6'),
dexp(100),
date_part(dow, '2008-01-05 14:00:00'),
hll_cardinality(expr),
JSON_ARRAY_LENGTH('[11,12,13,{"f1":21,"f2":[25,26]},14]'),
c_mktsegment,
c_comment,
getdate() as hoy,
GETDATE AS get_date_caps_test,
sysdate() AS sys_date_col_test,
SYSDATE() AS sys_date_caps_col_test,
ISNULL(test, test_is_null) AS null_test_col_caps,
ISNULL(test, test_is_null) AS null_test_col_caps,
isnull(test, 'test_is_null') AS null_test_col,
date_part(year, date(origination_date)) || '-' || 'Q' || floor(
(date_part(month, date(origination_date)) - 1) / 3) + 1 as origination_quarter,
date_part(SECONDS, '2019-10-01 00:00:01.000001'::timestamp),
first_value(
case when colA = 2 then id2
end ignore nulls
) over (
partition by
customer_id
order by
created_at
rows between unbounded preceding and unbounded following
) as test_syntax_change
from
redshift_sample_data.tpch_rs1.customer
ORDER BY colC, colB DESC"""
45 changes: 45 additions & 0 deletions tmp/beyondsqltest/testlookmlfiles/uno.lkml
@@ -0,0 +1,45 @@
uno = """{{
config(
materialized = 'table'
)
}}

select
convert(string,c_custkey) as stringkey,
c_name,
c_address,
c_nationkey,
c_phone,
dlog10(c_acctbal) as actbalbaseten,
dlog10(c_acctbal) as actbalbaseten,
JSON_EXTRACT_PATH_TEXT('{"f2":{"f3":1},"f4":{"f5":99,"f6":"star"}}','f4', 'f6'),
dexp(100),
date_part(dow, '2008-01-05 14:00:00'),
hll_cardinality(expr),
JSON_ARRAY_LENGTH('[11,12,13,{"f1":21,"f2":[25,26]},14]'),
c_mktsegment,
c_comment,
getdate() as hoy,
GETDATE AS get_date_caps_test,
sysdate() AS sys_date_col_test,
SYSDATE() AS sys_date_caps_col_test,
ISNULL(test, test_is_null) AS null_test_col_caps,
ISNULL(test, test_is_null) AS null_test_col_caps,
isnull(test, 'test_is_null') AS null_test_col,
date_part(year, date(origination_date)) || '-' || 'Q' || floor(
(date_part(month, date(origination_date)) - 1) / 3) + 1 as origination_quarter,
date_part(SECONDS, '2019-10-01 00:00:01.000001'::timestamp),
first_value(
case when colA = 2 then id2
end ignore nulls
) over (
partition by
customer_id
order by
created_at
rows between unbounded preceding and unbounded following
) as test_syntax_change
from
redshift_sample_data.tpch_rs1.customer
ORDER BY colC, colB DESC"""
print(uno)
45 changes: 45 additions & 0 deletions tmp/beyondsqltest/testpyfiles/dos.py
@@ -0,0 +1,45 @@
dos = """{{
config(
materialized = 'table'
)
}}
select
convert(string,c_custkey) as stringkey,
c_name,
c_address,
c_nationkey,
c_phone,
dlog10(c_acctbal) as actbalbaseten,
dlog10(c_acctbal) as actbalbaseten,
JSON_EXTRACT_PATH_TEXT('{"f2":{"f3":1},"f4":{"f5":99,"f6":"star"}}','f4', 'f6'),
dexp(100),
date_part(dow, '2008-01-05 14:00:00'),
hll_cardinality(expr),
JSON_ARRAY_LENGTH('[11,12,13,{"f1":21,"f2":[25,26]},14]'),
c_mktsegment,
c_comment,
getdate() as hoy,
GETDATE AS get_date_caps_test,
sysdate() AS sys_date_col_test,
SYSDATE() AS sys_date_caps_col_test,
ISNULL(test, test_is_null) AS null_test_col_caps,
ISNULL(test, test_is_null) AS null_test_col_caps,
isnull(test, 'test_is_null') AS null_test_col,
date_part(year, date(origination_date)) || '-' || 'Q' || floor(
(date_part(month, date(origination_date)) - 1) / 3) + 1 as origination_quarter,
date_part(SECONDS, '2019-10-01 00:00:01.000001'::timestamp),
first_value(
case when colA = 2 then id2
end ignore nulls
) over (
partition by
customer_id
order by
created_at
rows between unbounded preceding and unbounded following
) as test_syntax_change
from
redshift_sample_data.tpch_rs1.customer
ORDER BY colC, colB DESC"""
print(dos)
45 changes: 45 additions & 0 deletions tmp/beyondsqltest/testpyfiles/tres.py
@@ -0,0 +1,45 @@
tres = """{{
config(
materialized = 'table'
)
}}
select
convert(string,c_custkey) as stringkey,
c_name,
c_address,
c_nationkey,
c_phone,
dlog10(c_acctbal) as actbalbaseten,
dlog10(c_acctbal) as actbalbaseten,
JSON_EXTRACT_PATH_TEXT('{"f2":{"f3":1},"f4":{"f5":99,"f6":"star"}}','f4', 'f6'),
dexp(100),
date_part(dow, '2008-01-05 14:00:00'),
hll_cardinality(expr),
JSON_ARRAY_LENGTH('[11,12,13,{"f1":21,"f2":[25,26]},14]'),
c_mktsegment,
c_comment,
getdate() as hoy,
GETDATE AS get_date_caps_test,
sysdate() AS sys_date_col_test,
SYSDATE() AS sys_date_caps_col_test,
ISNULL(test, test_is_null) AS null_test_col_caps,
ISNULL(test, test_is_null) AS null_test_col_caps,
isnull(test, 'test_is_null') AS null_test_col,
date_part(year, date(origination_date)) || '-' || 'Q' || floor(
(date_part(month, date(origination_date)) - 1) / 3) + 1 as origination_quarter,
date_part(SECONDS, '2019-10-01 00:00:01.000001'::timestamp),
first_value(
case when colA = 2 then id2
end ignore nulls
) over (
partition by
customer_id
order by
created_at
rows between unbounded preceding and unbounded following
) as test_syntax_change
from
redshift_sample_data.tpch_rs1.customer
ORDER BY colC, colB DESC"""
print(tres)
45 changes: 45 additions & 0 deletions tmp/beyondsqltest/testpyfiles/uno.py
@@ -0,0 +1,45 @@
uno = """{{
config(
materialized = 'table'
)
}}
select
convert(string,c_custkey) as stringkey,
c_name,
c_address,
c_nationkey,
c_phone,
dlog10(c_acctbal) as actbalbaseten,
dlog10(c_acctbal) as actbalbaseten,
JSON_EXTRACT_PATH_TEXT('{"f2":{"f3":1},"f4":{"f5":99,"f6":"star"}}','f4', 'f6'),
dexp(100),
date_part(dow, '2008-01-05 14:00:00'),
hll_cardinality(expr),
JSON_ARRAY_LENGTH('[11,12,13,{"f1":21,"f2":[25,26]},14]'),
c_mktsegment,
c_comment,
getdate() as hoy,
GETDATE AS get_date_caps_test,
sysdate() AS sys_date_col_test,
SYSDATE() AS sys_date_caps_col_test,
ISNULL(test, test_is_null) AS null_test_col_caps,
ISNULL(test, test_is_null) AS null_test_col_caps,
isnull(test, 'test_is_null') AS null_test_col,
date_part(year, date(origination_date)) || '-' || 'Q' || floor(
(date_part(month, date(origination_date)) - 1) / 3) + 1 as origination_quarter,
date_part(SECONDS, '2019-10-01 00:00:01.000001'::timestamp),
first_value(
case when colA = 2 then id2
end ignore nulls
) over (
partition by
customer_id
order by
created_at
rows between unbounded preceding and unbounded following
) as test_syntax_change
from
redshift_sample_data.tpch_rs1.customer
ORDER BY colC, colB DESC"""
print(uno)
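
All six uploads are parallel copies of the same query stored as a triple-quoted string under tmp/beyondsqltest/. A minimal sketch of pulling those SQL bodies back out for inspection, assuming a checkout at the repository root and the directory layout added in this commit (the commit itself prescribes no such harness):

import re
from pathlib import Path

# Directories as added in this commit.
fixture_dirs = [
    Path("tmp/beyondsqltest/testlookmlfiles"),
    Path("tmp/beyondsqltest/testpyfiles"),
]

# The embedded query sits inside the first triple-quoted string of each file.
triple_quoted = re.compile(r'"""(.*?)"""', re.DOTALL)

for directory in fixture_dirs:
    for path in sorted(directory.glob("*")):
        match = triple_quoted.search(path.read_text())
        if match:
            sql = match.group(1).strip()
            print(f"{path.name}: {len(sql.splitlines())} lines of SQL")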
