init
This commit is contained in:
parent
da5c6a478e
commit
d52e2fc9bf
|
@ -0,0 +1,10 @@
|
|||
# Convenience targets for this dbt project.
# NOTE(review): the original first line was `#!bin/bash` — a malformed shebang
# (missing leading `/`) that has no effect in a Makefile; replaced with comments.

# Stage, commit and push all local changes.
push:
	git add . && git commit -a -m "update" && git push

# Install dbt package dependencies, then rebuild the `orders` model from scratch.
run:
	dbt deps --profiles-dir=. --project-dir=.
	#dbt run --profiles-dir=. --project-dir=. --full-refresh
	dbt run --profiles-dir=. --project-dir=. --full-refresh --select orders
|
||||
|
20
README.md
20
README.md
|
@ -1 +1,19 @@
|
|||
# dbt-selly
|
||||
## Installing dbt
|
||||
|
||||
1. Activate your venv and run `pip3 install dbt`
|
||||
1. Copy `airbyte-normalization/sample_files/profiles.yml` over to `~/.dbt/profiles.yml`
|
||||
1. Edit to configure your profiles accordingly
|
||||
|
||||
## Running dbt
|
||||
|
||||
1. `cd airbyte-normalization`
|
||||
1. You can now run dbt commands, to check the setup is fine: `dbt debug`
|
||||
1. To build the dbt tables in your warehouse: `dbt run`
|
||||
|
||||
## Running dbt from Airbyte generated config
|
||||
|
||||
1. You can also change directory (`cd /tmp/dev_root/workspace/1/0/normalize` for example) to one of the workspace generated by Airbyte within one of the `normalize` folder.
|
||||
1. You should find `profiles.yml` and a bunch of other dbt files/folders created there.
|
||||
1. To check everything is setup properly: `dbt debug --profiles-dir=$(pwd) --project-dir=$(pwd)`
|
||||
1. You can modify the `.sql` files and run `dbt run --profiles-dir=$(pwd) --project-dir=$(pwd)` too
|
||||
1. You can inspect compiled dbt `.sql` files before they are run in the destination engine in `normalize/build/compiled` or `normalize/build/run` folders
|
|
@ -0,0 +1,63 @@
|
|||
# This file is necessary to install dbt-utils with dbt deps
# the content will be overwritten by the transform function

# Name your package! Package names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: 'airbyte_utils'
version: '1.0'
config-version: 2

# This setting configures which "profile" dbt uses for this project. Profiles contain
# database connection information, and should be configured in the ~/.dbt/profiles.yml file
profile: 'normalize'

# These configurations specify where dbt should look for different types of files.
# The `source-paths` config, for example, states that source models can be found
# in the "models/" directory. You probably won't need to change these!
source-paths: ["models"]
docs-paths: ["docs"]
analysis-paths: ["analysis"]
test-paths: ["tests"]
data-paths: ["data"]
macro-paths: ["macros"]

target-path: "../build"  # directory which will store compiled SQL files
log-path: "../logs"  # directory which will store DBT logs
modules-path: "../dbt_modules"  # directory which will store external DBT dependencies

clean-targets:  # directories to be removed by `dbt clean`
  - "build"
  - "dbt_modules"

quoting:
  database: true
  # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785)
  # all schemas should be unquoted
  schema: false
  identifier: true

# You can define configurations for models in the `source-paths` directory here.
# Using these configurations, you can enable or disable models, change how they
# are materialized, and more!
models:
  airbyte_utils:
    +materialized: table
    generated:
      airbyte_ctes:
        +tags: airbyte_internal_cte
        +materialized: ephemeral
      airbyte_incremental:
        +tags: incremental_tables
        +materialized: incremental
        +on_schema_change: sync_all_columns
      airbyte_tables:
        +tags: normalized_tables
        +materialized: table
      airbyte_views:
        +tags: airbyte_internal_views
        +materialized: view

dispatch:
  - macro_namespace: dbt_utils
    search_order: ['airbyte_utils', 'dbt_utils']
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1 @@
|
|||
{"ssl":false,"host":"localhost","port":5555,"schema":"public","database":"selly_etl","password":"123","username":"selly","tunnel_method":{"tunnel_method":"NO_TUNNEL"}}
|
|
@ -0,0 +1,166 @@
|
|||
{#
    Adapter Macros for the following functions:
    - Bigquery: unnest() -> https://cloud.google.com/bigquery/docs/reference/standard-sql/arrays#flattening-arrays-and-repeated-fields
    - Snowflake: flatten() -> https://docs.snowflake.com/en/sql-reference/functions/flatten.html
    - Redshift: -> https://blog.getdbt.com/how-to-unnest-arrays-in-redshift/
    - postgres: unnest() -> https://www.postgresqltutorial.com/postgresql-array/
    - MSSQL: openjson() -> https://docs.microsoft.com/en-us/sql/relational-databases/json/validate-query-and-change-json-data-with-built-in-functions-sql-server?view=sql-server-ver15
#}

{# cross_join_unnest -------------------------------------------------
   Renders the JOIN clause that explodes an array column into one row per element.
   Dispatched per destination engine; engines without array support only warn. #}

{% macro cross_join_unnest(stream_name, array_col) -%}
    {{ adapter.dispatch('cross_join_unnest')(stream_name, array_col) }}
{%- endmacro %}

{% macro default__cross_join_unnest(stream_name, array_col) -%}
    {% do exceptions.warn("Undefined macro cross_join_unnest for this destination engine") %}
{%- endmacro %}

{% macro bigquery__cross_join_unnest(stream_name, array_col) -%}
    cross join unnest({{ array_col }}) as {{ array_col }}
{%- endmacro %}

{% macro oracle__cross_join_unnest(stream_name, array_col) -%}
    {% do exceptions.warn("Normalization does not support unnesting for Oracle yet.") %}
{%- endmacro %}

{% macro postgres__cross_join_unnest(stream_name, array_col) -%}
    {# Guard with jsonb_typeof so non-array values expand to zero rows instead of erroring. #}
    cross join jsonb_array_elements(
        case jsonb_typeof({{ array_col }})
        when 'array' then {{ array_col }}
        else '[]' end
    ) as _airbyte_nested_data
{%- endmacro %}

{# MySQL/Redshift rely on the `joined` CTE emitted by the matching unnest_cte macro below. #}
{% macro mysql__cross_join_unnest(stream_name, array_col) -%}
    left join joined on _airbyte_{{ stream_name }}_hashid = joined._airbyte_hashid
{%- endmacro %}

{% macro redshift__cross_join_unnest(stream_name, array_col) -%}
    left join joined on _airbyte_{{ stream_name }}_hashid = joined._airbyte_hashid
{%- endmacro %}

{% macro snowflake__cross_join_unnest(stream_name, array_col) -%}
    cross join table(flatten({{ array_col }})) as {{ array_col }}
{%- endmacro %}

{% macro sqlserver__cross_join_unnest(stream_name, array_col) -%}
    {# https://docs.microsoft.com/en-us/sql/relational-databases/json/convert-json-data-to-rows-and-columns-with-openjson-sql-server?view=sql-server-ver15#option-1---openjson-with-the-default-output #}
    {# OPENJSON [type] codes: 4 = array (re-open to get elements), 5 = object (keep as-is). #}
    CROSS APPLY (
        SELECT [value] = CASE
                            WHEN [type] = 4 THEN (SELECT [value] FROM OPENJSON([value]))
                            WHEN [type] = 5 THEN [value]
                            END
        FROM OPENJSON({{ array_col }})
    ) AS {{ array_col }}
{%- endmacro %}

{# unnested_column_value -- this macro is related to unnest_cte
   Returns the expression that reads one unnested element, matching how the
   engine-specific cross_join_unnest/unnest_cte above exposes it. #}

{% macro unnested_column_value(column_col) -%}
    {{ adapter.dispatch('unnested_column_value')(column_col) }}
{%- endmacro %}

{% macro default__unnested_column_value(column_col) -%}
    {{ column_col }}
{%- endmacro %}

{% macro postgres__unnested_column_value(column_col) -%}
    _airbyte_nested_data
{%- endmacro %}

{% macro snowflake__unnested_column_value(column_col) -%}
    {{ column_col }}.value
{%- endmacro %}

{% macro redshift__unnested_column_value(column_col) -%}
    _airbyte_nested_data
{%- endmacro %}

{% macro mysql__unnested_column_value(column_col) -%}
    _airbyte_nested_data
{%- endmacro %}

{% macro oracle__unnested_column_value(column_col) -%}
    {{ column_col }}
{%- endmacro %}

{% macro sqlserver__unnested_column_value(column_col) -%}
    {# unnested array/sub_array will be located in `value` column afterwards, we need to address to it #}
    {{ column_col }}.value
{%- endmacro %}

{# unnest_cte -------------------------------------------------
   Emits the `numbers`/`joined` CTEs needed by engines that cannot unnest natively
   (MySQL, Redshift). No-op for every other engine. #}

{% macro unnest_cte(from_table, stream_name, column_col) -%}
    {{ adapter.dispatch('unnest_cte')(from_table, stream_name, column_col) }}
{%- endmacro %}

{% macro default__unnest_cte(from_table, stream_name, column_col) -%}{%- endmacro %}

{# -- based on https://blog.getdbt.com/how-to-unnest-arrays-in-redshift/ #}
{% macro redshift__unnest_cte(from_table, stream_name, column_col) -%}
    {%- if not execute -%}
        {{ return('') }}
    {% endif %}
    {# Run a scalar query at compile time to size the generated number series. #}
    {%- call statement('max_json_array_length', fetch_result=True) -%}
        with max_value as (
            select max(json_array_length({{ column_col }}, true)) as max_number_of_items
            from {{ from_table }}
        )
        select
            case when max_number_of_items is not null and max_number_of_items > 1
            then max_number_of_items
            else 1 end as max_number_of_items
        from max_value
    {%- endcall -%}
    {%- set max_length = load_result('max_json_array_length') -%}
    with numbers as (
        {{ dbt_utils.generate_series(max_length["data"][0][0]) }}
    ),
    joined as (
        select
            _airbyte_{{ stream_name }}_hashid as _airbyte_hashid,
            json_extract_array_element_text({{ column_col }}, numbers.generated_number::int - 1, true) as _airbyte_nested_data
        from {{ from_table }}
        cross join numbers
        -- only generate the number of records in the cross join that corresponds
        -- to the number of items in {{ from_table }}.{{ column_col }}
        where numbers.generated_number <= json_array_length({{ column_col }}, true)
    )
{%- endmacro %}

{% macro mysql__unnest_cte(from_table, stream_name, column_col) -%}
    {%- if not execute -%}
        {{ return('') }}
    {% endif %}

    {# Run a scalar query at compile time to size the generated number series. #}
    {%- call statement('max_json_array_length', fetch_result=True) -%}
        with max_value as (
            select max(json_length({{ column_col }})) as max_number_of_items
            from {{ from_table }}
        )
        select
            case when max_number_of_items is not null and max_number_of_items > 1
            then max_number_of_items
            else 1 end as max_number_of_items
        from max_value
    {%- endcall -%}

    {%- set max_length = load_result('max_json_array_length') -%}
    with numbers as (
        {{ dbt_utils.generate_series(max_length["data"][0][0]) }}
    ),
    joined as (
        select
            _airbyte_{{ stream_name }}_hashid as _airbyte_hashid,
            {# -- json_extract(column_col, '$[i][0]') as _airbyte_nested_data #}
            json_extract({{ column_col }}, concat("$[", numbers.generated_number - 1, "][0]")) as _airbyte_nested_data
        from {{ from_table }}
        cross join numbers
        -- only generate the number of records in the cross join that corresponds
        -- to the number of items in {{ from_table }}.{{ column_col }}
        where numbers.generated_number <= json_length({{ column_col }})
    )
{%- endmacro %}
|
|
@ -0,0 +1,23 @@
|
|||
{#
    concat in dbt 0.6.4 used to work fine for bigquery but the new implementation in 0.7.3 is less scalable (can not handle too many columns)
    Therefore, we revert the implementation here and add versions for missing destinations
#}

{# Dispatch entry point: `fields` is a list of already-rendered SQL expressions. #}
{% macro concat(fields) -%}
    {{ adapter.dispatch('concat')(fields) }}
{%- endmacro %}

{% macro bigquery__concat(fields) -%}
    {#-- concat() in SQL bigquery scales better with number of columns than using the '||' operator --#}
    concat({{ fields|join(', ') }})
{%- endmacro %}

{% macro sqlserver__concat(fields) -%}
    {#-- CONCAT() in SQL SERVER accepts from 2 to 254 arguments, we use batches for the main concat, to overcome the limit. --#}
    {#-- Each chunk gets a trailing '' so CONCAT always receives at least 2 arguments. --#}
    {% set concat_chunks = [] %}
    {% for chunk in fields|batch(253) -%}
        {% set _ = concat_chunks.append( "concat(" ~ chunk|join(', ') ~ ",'')" ) %}
    {% endfor %}

    concat({{ concat_chunks|join(', ') }}, '')
{%- endmacro %}
|
|
@ -0,0 +1,7 @@
|
|||
{# Engine overrides for dbt's current_timestamp macro: both MySQL and Oracle
   accept the ANSI CURRENT_TIMESTAMP keyword (no parentheses). #}

{% macro mysql__current_timestamp() %}
    CURRENT_TIMESTAMP
{% endmacro %}

{% macro oracle__current_timestamp() %}
    CURRENT_TIMESTAMP
{% endmacro %}
|
|
@ -0,0 +1,181 @@
|
|||
{# Per-destination SQL type names used when casting normalized columns.
   Tag whitespace-control forms are preserved exactly as authored, since
   changing them would alter the rendered SQL whitespace. #}

{# json ------------------------------------------------- #}

{%- macro type_json() -%}
    {{ adapter.dispatch('type_json')() }}
{%- endmacro -%}

{% macro default__type_json() %}
    string
{% endmacro %}

{%- macro redshift__type_json() -%}
    varchar
{%- endmacro -%}

{% macro postgres__type_json() %}
    jsonb
{% endmacro %}

{%- macro oracle__type_json() -%}
    varchar2(4000)
{%- endmacro -%}

{% macro snowflake__type_json() %}
    variant
{% endmacro %}

{%- macro mysql__type_json() -%}
    json
{%- endmacro -%}

{%- macro sqlserver__type_json() -%}
    VARCHAR(max)
{%- endmacro -%}

{% macro clickhouse__type_json() %}
    String
{% endmacro %}


{# string ------------------------------------------------- #}

{%- macro mysql__type_string() -%}
    char
{%- endmacro -%}

{%- macro oracle__type_string() -%}
    varchar2(4000)
{%- endmacro -%}

{% macro sqlserver__type_string() %}
    VARCHAR(max)
{%- endmacro -%}

{%- macro clickhouse__type_string() -%}
    String
{%- endmacro -%}


{# float ------------------------------------------------- #}
{% macro mysql__type_float() %}
    float
{% endmacro %}

{% macro oracle__type_float() %}
    float
{% endmacro %}

{% macro clickhouse__type_float() %}
    Float64
{% endmacro %}


{# int ------------------------------------------------- #}
{% macro default__type_int() %}
    signed
{% endmacro %}

{% macro oracle__type_int() %}
    int
{% endmacro %}

{% macro clickhouse__type_int() %}
    INT
{% endmacro %}


{# bigint ------------------------------------------------- #}
{% macro mysql__type_bigint() %}
    signed
{% endmacro %}

{% macro oracle__type_bigint() %}
    numeric
{% endmacro %}

{% macro clickhouse__type_bigint() %}
    BIGINT
{% endmacro %}


{# numeric ------------------------------------------------- #}
{% macro mysql__type_numeric() %}
    float
{% endmacro %}

{% macro clickhouse__type_numeric() %}
    Float64
{% endmacro %}


{# timestamp ------------------------------------------------- #}
{# NOTE(review): `time` (not `timestamp`/`datetime`) looks suspicious here, but it
   is kept as authored — confirm against the MySQL destination before changing. #}
{% macro mysql__type_timestamp() %}
    time
{% endmacro %}

{%- macro sqlserver__type_timestamp() -%}
    {#-- in TSQL timestamp is really datetime --#}
    {#-- https://docs.microsoft.com/en-us/sql/t-sql/functions/date-and-time-data-types-and-functions-transact-sql?view=sql-server-ver15#DateandTimeDataTypes --#}
    datetime
{%- endmacro -%}

{% macro clickhouse__type_timestamp() %}
    DateTime64
{% endmacro %}


{# timestamp with time zone ------------------------------------------------- #}

{%- macro type_timestamp_with_timezone() -%}
    {{ adapter.dispatch('type_timestamp_with_timezone')() }}
{%- endmacro -%}

{% macro default__type_timestamp_with_timezone() %}
    timestamp with time zone
{% endmacro %}

{% macro bigquery__type_timestamp_with_timezone() %}
    timestamp
{% endmacro %}

{#-- MySQL doesn't allow cast operation to work with TIMESTAMP so we have to use char --#}
{%- macro mysql__type_timestamp_with_timezone() -%}
    char
{%- endmacro -%}

{% macro oracle__type_timestamp_with_timezone() %}
    varchar2(4000)
{% endmacro %}

{%- macro sqlserver__type_timestamp_with_timezone() -%}
    {#-- in TSQL timestamp is really datetime or datetime2 --#}
    {#-- https://docs.microsoft.com/en-us/sql/t-sql/functions/date-and-time-data-types-and-functions-transact-sql?view=sql-server-ver15#DateandTimeDataTypes --#}
    datetime
{%- endmacro -%}

{% macro clickhouse__type_timestamp_with_timezone() %}
    DateTime64
{% endmacro %}


{# date ------------------------------------------------- #}

{%- macro type_date() -%}
    {{ adapter.dispatch('type_date')() }}
{%- endmacro -%}

{% macro default__type_date() %}
    date
{% endmacro %}

{% macro oracle__type_date() %}
    varchar2(4000)
{% endmacro %}

{%- macro sqlserver__type_date() -%}
    date
{%- endmacro -%}

{% macro clickhouse__type_date() %}
    Date
{% endmacro %}
|
|
@ -0,0 +1,7 @@
|
|||
{# Engine overrides for the set-difference operator. #}

{% macro mysql__except() %}
    {# MySQL has no EXCEPT/MINUS; emit nothing and surface a compile-time warning. #}
    {% do exceptions.warn("MySQL does not support EXCEPT operator") %}
{% endmacro %}

{% macro oracle__except() %}
    {# Oracle's spelling of EXCEPT. #}
    minus
{% endmacro %}
|
|
@ -0,0 +1,5 @@
|
|||
{# converting hash in varchar _macro #}

{% macro sqlserver__hash(field) -%}
    {# MD5 the field (NULL coerced to '') and hex-encode; style 2 drops the leading '0x'. #}
    convert(varchar(32), HashBytes('md5', coalesce(cast({{field}} as {{dbt_utils.type_string()}}), '')), 2)
{%- endmacro %}
|
|
@ -0,0 +1,226 @@
|
|||
{#
    Adapter Macros for the following functions:
    - Bigquery: JSON_EXTRACT(json_string_expr, json_path_format) -> https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions
    - Snowflake: JSON_EXTRACT_PATH_TEXT( <column_identifier> , '<path_name>' ) -> https://docs.snowflake.com/en/sql-reference/functions/json_extract_path_text.html
    - Redshift: json_extract_path_text('json_string', 'path_elem' [,'path_elem'[, ...] ] [, null_if_invalid ] ) -> https://docs.aws.amazon.com/redshift/latest/dg/JSON_EXTRACT_PATH_TEXT.html
    - Postgres: json_extract_path_text(<from_json>, 'path' [, 'path' [, ...}}) -> https://www.postgresql.org/docs/12/functions-json.html
    - MySQL: JSON_EXTRACT(json_doc, 'path' [, 'path'] ...) -> https://dev.mysql.com/doc/refman/8.0/en/json-search-functions.html
    - ClickHouse: JSONExtractString(json_doc, 'path' [, 'path'] ...) -> https://clickhouse.com/docs/en/sql-reference/functions/json-functions/
#}

{# format_json_path --------------------------------------------------
   Turns a list of path components into the engine's JSON-path literal,
   escaping quotes as each engine requires. #}
{% macro format_json_path(json_path_list) -%}
    {{ adapter.dispatch('format_json_path')(json_path_list) }}
{%- endmacro %}

{% macro default__format_json_path(json_path_list) -%}
    {{ '.' ~ json_path_list|join('.') }}
{%- endmacro %}

{% macro oracle__format_json_path(json_path_list) -%}
    {{ '\'$."' ~ json_path_list|join('."') ~ '"\'' }}
{%- endmacro %}

{% macro bigquery__format_json_path(json_path_list) -%}
    {%- set str_list = [] -%}
    {%- for json_path in json_path_list -%}
        {%- if str_list.append(json_path.replace('"', '\\"')) -%} {%- endif -%}
    {%- endfor -%}
    {{ '"$[\'' ~ str_list|join('\'][\'') ~ '\']"' }}
{%- endmacro %}

{% macro postgres__format_json_path(json_path_list) -%}
    {%- set str_list = [] -%}
    {%- for json_path in json_path_list -%}
        {%- if str_list.append(json_path.replace("'", "''")) -%} {%- endif -%}
    {%- endfor -%}
    {{ "'" ~ str_list|join("','") ~ "'" }}
{%- endmacro %}

{% macro mysql__format_json_path(json_path_list) -%}
    {# -- '$."x"."y"."z"' #}
    {{ "'$.\"" ~ json_path_list|join(".") ~ "\"'" }}
{%- endmacro %}

{% macro redshift__format_json_path(json_path_list) -%}
    {%- set str_list = [] -%}
    {%- for json_path in json_path_list -%}
        {%- if str_list.append(json_path.replace("'", "''")) -%} {%- endif -%}
    {%- endfor -%}
    {{ "'" ~ str_list|join("','") ~ "'" }}
{%- endmacro %}

{% macro snowflake__format_json_path(json_path_list) -%}
    {%- set str_list = [] -%}
    {%- for json_path in json_path_list -%}
        {%- if str_list.append(json_path.replace("'", "''").replace('"', '""')) -%} {%- endif -%}
    {%- endfor -%}
    {{ "'\"" ~ str_list|join('"."') ~ "\"'" }}
{%- endmacro %}

{% macro sqlserver__format_json_path(json_path_list) -%}
    {# -- '$."x"."y"."z"' #}
    {%- set str_list = [] -%}
    {%- for json_path in json_path_list -%}
        {%- if str_list.append(json_path.replace("'", "''").replace('"', '\\"')) -%} {%- endif -%}
    {%- endfor -%}
    {{ "'$.\"" ~ str_list|join(".") ~ "\"'" }}
{%- endmacro %}

{% macro clickhouse__format_json_path(json_path_list) -%}
    {%- set str_list = [] -%}
    {%- for json_path in json_path_list -%}
        {%- if str_list.append(json_path.replace("'", "''").replace('"', '\\"')) -%} {%- endif -%}
    {%- endfor -%}
    {{ "'" ~ str_list|join("','") ~ "'" }}
{%- endmacro %}

{# json_extract -------------------------------------------------
   Extracts a nested JSON value (object/array preserved as JSON). An empty
   `from_table` means the column expression already stands alone. #}

{% macro json_extract(from_table, json_column, json_path_list, normalized_json_path) -%}
    {{ adapter.dispatch('json_extract')(from_table, json_column, json_path_list, normalized_json_path) }}
{%- endmacro %}

{% macro default__json_extract(from_table, json_column, json_path_list, normalized_json_path) -%}
    json_extract({{ from_table }}.{{ json_column }}, {{ format_json_path(json_path_list) }})
{%- endmacro %}

{% macro oracle__json_extract(from_table, json_column, json_path_list, normalized_json_path) -%}
    json_value({{ json_column }}, {{ format_json_path(normalized_json_path) }})
{%- endmacro %}

{% macro bigquery__json_extract(from_table, json_column, json_path_list, normalized_json_path) -%}
    {%- if from_table|string() == '' %}
        json_extract({{ json_column }}, {{ format_json_path(normalized_json_path) }})
    {% else %}
        json_extract({{ from_table }}.{{ json_column }}, {{ format_json_path(normalized_json_path) }})
    {% endif -%}
{%- endmacro %}

{% macro postgres__json_extract(from_table, json_column, json_path_list, normalized_json_path) -%}
    {%- if from_table|string() == '' %}
        jsonb_extract_path({{ json_column }}, {{ format_json_path(json_path_list) }})
    {% else %}
        jsonb_extract_path({{ from_table }}.{{ json_column }}, {{ format_json_path(json_path_list) }})
    {% endif -%}
{%- endmacro %}

{% macro mysql__json_extract(from_table, json_column, json_path_list, normalized_json_path) -%}
    {%- if from_table|string() == '' %}
        json_extract({{ json_column }}, {{ format_json_path(normalized_json_path) }})
    {% else %}
        json_extract({{ from_table }}.{{ json_column }}, {{ format_json_path(normalized_json_path) }})
    {% endif -%}
{%- endmacro %}

{% macro redshift__json_extract(from_table, json_column, json_path_list, normalized_json_path) -%}
    {# json_extract_path_text returns '' for missing paths; map that back to NULL. #}
    {%- if from_table|string() == '' %}
        case when json_extract_path_text({{ json_column }}, {{ format_json_path(json_path_list) }}, true) != '' then json_extract_path_text({{ json_column }}, {{ format_json_path(json_path_list) }}, true) end
    {% else %}
        case when json_extract_path_text({{ from_table }}.{{ json_column }}, {{ format_json_path(json_path_list) }}, true) != '' then json_extract_path_text({{ from_table }}.{{ json_column }}, {{ format_json_path(json_path_list) }}, true) end
    {% endif -%}
{%- endmacro %}

{% macro snowflake__json_extract(from_table, json_column, json_path_list, normalized_json_path) -%}
    {%- if from_table|string() == '' %}
        get_path(parse_json({{ json_column }}), {{ format_json_path(json_path_list) }})
    {% else %}
        get_path(parse_json({{ from_table }}.{{ json_column }}), {{ format_json_path(json_path_list) }})
    {% endif -%}
{%- endmacro %}

{% macro sqlserver__json_extract(from_table, json_column, json_path_list, normalized_json_path) -%}
    json_query({{ json_column }}, {{ format_json_path(json_path_list) }})
{%- endmacro %}

{% macro clickhouse__json_extract(from_table, json_column, json_path_list, normalized_json_path) -%}
    {%- if from_table|string() == '' %}
        JSONExtractRaw({{ json_column }}, {{ format_json_path(json_path_list) }})
    {% else %}
        JSONExtractRaw({{ from_table }}.{{ json_column }}, {{ format_json_path(json_path_list) }})
    {% endif -%}
{%- endmacro %}

{# json_extract_scalar -------------------------------------------------
   Extracts a scalar leaf value as text. #}

{% macro json_extract_scalar(json_column, json_path_list, normalized_json_path) -%}
    {{ adapter.dispatch('json_extract_scalar')(json_column, json_path_list, normalized_json_path) }}
{%- endmacro %}

{% macro default__json_extract_scalar(json_column, json_path_list, normalized_json_path) -%}
    json_extract_scalar({{ json_column }}, {{ format_json_path(json_path_list) }})
{%- endmacro %}

{% macro oracle__json_extract_scalar(json_column, json_path_list, normalized_json_path) -%}
    json_value({{ json_column }}, {{ format_json_path(normalized_json_path) }})
{%- endmacro %}

{% macro bigquery__json_extract_scalar(json_column, json_path_list, normalized_json_path) -%}
    json_extract_scalar({{ json_column }}, {{ format_json_path(normalized_json_path) }})
{%- endmacro %}

{% macro postgres__json_extract_scalar(json_column, json_path_list, normalized_json_path) -%}
    jsonb_extract_path_text({{ json_column }}, {{ format_json_path(json_path_list) }})
{%- endmacro %}

{% macro mysql__json_extract_scalar(json_column, json_path_list, normalized_json_path) -%}
    json_value({{ json_column }}, {{ format_json_path(normalized_json_path) }})
{%- endmacro %}

{% macro redshift__json_extract_scalar(json_column, json_path_list, normalized_json_path) -%}
    case when json_extract_path_text({{ json_column }}, {{ format_json_path(json_path_list) }}, true) != '' then json_extract_path_text({{ json_column }}, {{ format_json_path(json_path_list) }}, true) end
{%- endmacro %}

{% macro snowflake__json_extract_scalar(json_column, json_path_list, normalized_json_path) -%}
    to_varchar(get_path(parse_json({{ json_column }}), {{ format_json_path(json_path_list) }}))
{%- endmacro %}

{% macro sqlserver__json_extract_scalar(json_column, json_path_list, normalized_json_path) -%}
    json_value({{ json_column }}, {{ format_json_path(json_path_list) }})
{%- endmacro %}

{% macro clickhouse__json_extract_scalar(json_column, json_path_list, normalized_json_path) -%}
    JSONExtractRaw({{ json_column }}, {{ format_json_path(json_path_list) }})
{%- endmacro %}

{# json_extract_array -------------------------------------------------
   Extracts a nested array value as JSON. #}

{% macro json_extract_array(json_column, json_path_list, normalized_json_path) -%}
    {{ adapter.dispatch('json_extract_array')(json_column, json_path_list, normalized_json_path) }}
{%- endmacro %}

{% macro default__json_extract_array(json_column, json_path_list, normalized_json_path) -%}
    json_extract_array({{ json_column }}, {{ format_json_path(json_path_list) }})
{%- endmacro %}

{% macro oracle__json_extract_array(json_column, json_path_list, normalized_json_path) -%}
    json_value({{ json_column }}, {{ format_json_path(normalized_json_path) }})
{%- endmacro %}

{% macro bigquery__json_extract_array(json_column, json_path_list, normalized_json_path) -%}
    json_extract_array({{ json_column }}, {{ format_json_path(normalized_json_path) }})
{%- endmacro %}

{% macro postgres__json_extract_array(json_column, json_path_list, normalized_json_path) -%}
    jsonb_extract_path({{ json_column }}, {{ format_json_path(json_path_list) }})
{%- endmacro %}

{% macro mysql__json_extract_array(json_column, json_path_list, normalized_json_path) -%}
    json_extract({{ json_column }}, {{ format_json_path(normalized_json_path) }})
{%- endmacro %}

{% macro redshift__json_extract_array(json_column, json_path_list, normalized_json_path) -%}
    json_extract_path_text({{ json_column }}, {{ format_json_path(json_path_list) }}, true)
{%- endmacro %}

{% macro snowflake__json_extract_array(json_column, json_path_list, normalized_json_path) -%}
    get_path(parse_json({{ json_column }}), {{ format_json_path(json_path_list) }})
{%- endmacro %}

{% macro sqlserver__json_extract_array(json_column, json_path_list, normalized_json_path) -%}
    json_query({{ json_column }}, {{ format_json_path(json_path_list) }})
{%- endmacro %}

{% macro clickhouse__json_extract_array(json_column, json_path_list, normalized_json_path) -%}
    JSONExtractArrayRaw({{ json_column }}, {{ format_json_path(json_path_list) }})
{%- endmacro %}
|
|
@ -0,0 +1,16 @@
|
|||
{# quote ---------------------------------- #}
{% macro quote(column_name) -%}
    {{ adapter.dispatch('quote')(column_name) }}
{%- endmacro %}

{% macro default__quote(column_name) -%}
    {# BUGFIX: the call was previously written without {{ }}, so the literal text
       "adapter.quote(column_name)" was rendered into the SQL instead of the
       quoted identifier. #}
    {{ adapter.quote(column_name) }}
{%- endmacro %}

{% macro oracle__quote(column_name) -%}
    {{ '\"' ~ column_name ~ '\"'}}
{%- endmacro %}

{% macro clickhouse__quote(column_name) -%}
    {{ '\"' ~ column_name ~ '\"'}}
{%- endmacro %}
|
|
@ -0,0 +1,25 @@
|
|||
{# surrogate_key ----------------------------------
   Engine overrides for dbt_utils.surrogate_key: hash the '~'-joined field list. #}

{% macro oracle__surrogate_key(field_list) -%}
    ora_hash(
        {%- for field in field_list %}
            {% if not loop.last %}
                {{ field }} || '~' ||
            {% else %}
                {{ field }}
            {% endif %}
        {%- endfor %}
    )
{%- endmacro %}

{% macro clickhouse__surrogate_key(field_list) -%}
    {# toString() coerces each field; assumeNotNull keeps the result non-nullable. #}
    assumeNotNull(hex(MD5(
        {%- for field in field_list %}
            {% if not loop.last %}
                toString({{ field }}) || '~' ||
            {% else %}
                toString({{ field }})
            {% endif %}
        {%- endfor %}
    )))
{%- endmacro %}
|
|
@ -0,0 +1,67 @@
|
|||
|
||||
{# boolean_to_string ------------------------------------------------- #}
{# Render a boolean column as a string, for inclusion in surrogate-key hashing. #}
{% macro boolean_to_string(boolean_column) -%}
    {{ adapter.dispatch('boolean_to_string')(boolean_column) }}
{%- endmacro %}

{# Default: pass through — most warehouses implicitly cast booleans in concatenation. #}
{% macro default__boolean_to_string(boolean_column) -%}
    {{ boolean_column }}
{%- endmacro %}

{# Redshift cannot concatenate booleans; map explicitly to 'true'/'false'.
   NOTE(review): a NULL boolean falls through to 'false' here — confirm intended. #}
{% macro redshift__boolean_to_string(boolean_column) -%}
    case when {{ boolean_column }} then 'true' else 'false' end
{%- endmacro %}

{# array_to_string ------------------------------------------------- #}
{# Render an array column as a string, for inclusion in surrogate-key hashing. #}
{% macro array_to_string(array_column) -%}
    {{ adapter.dispatch('array_to_string')(array_column) }}
{%- endmacro %}

{# Default: pass through unchanged. #}
{% macro default__array_to_string(array_column) -%}
    {{ array_column }}
{%- endmacro %}

{# BigQuery: join elements with '|', substituting '' for NULL elements
   (this is the SQL function array_to_string, not a recursive macro call). #}
{% macro bigquery__array_to_string(array_column) -%}
    array_to_string({{ array_column }}, "|", "")
{%- endmacro %}

{# Oracle: cast to varchar2; 4000 bytes is the classic non-extended limit. #}
{% macro oracle__array_to_string(array_column) -%}
    cast({{ array_column }} as varchar2(4000))
{%- endmacro %}

{# SQL Server: cast to the adapter's string type. #}
{% macro sqlserver__array_to_string(array_column) -%}
    cast({{ array_column }} as {{dbt_utils.type_string()}})
{%- endmacro %}

{# cast_to_boolean ------------------------------------------------- #}
{# Cast a (string) field to the warehouse's boolean type. #}
{% macro cast_to_boolean(field) -%}
    {{ adapter.dispatch('cast_to_boolean')(field) }}
{%- endmacro %}

{# Default: direct cast works for engines with a true boolean type. #}
{% macro default__cast_to_boolean(field) -%}
    cast({{ field }} as boolean)
{%- endmacro %}

{# -- MySQL does not support cast function converting string directly to boolean (an alias of tinyint(1), https://dev.mysql.com/doc/refman/8.0/en/cast-functions.html#function_cast #}
{# NOTE(review): non-'true' values (including NULL) all become false here. #}
{% macro mysql__cast_to_boolean(field) -%}
    IF(lower({{ field }}) = 'true', true, false)
{%- endmacro %}

{# -- Redshift does not support converting string directly to boolean, it must go through int first #}
{% macro redshift__cast_to_boolean(field) -%}
    cast(decode({{ field }}, 'true', '1', 'false', '0')::integer as boolean)
{%- endmacro %}

{# -- MS SQL Server does not support converting string directly to boolean, it must be casted as bit #}
{% macro sqlserver__cast_to_boolean(field) -%}
    cast({{ field }} as bit)
{%- endmacro %}

{# empty_string_to_null ------------------------------------------------- #}
{# Normalize empty strings to NULL so downstream casts do not fail on ''. #}
{% macro empty_string_to_null(field) -%}
    {{ return(adapter.dispatch('empty_string_to_null')(field)) }}
{%- endmacro %}

{%- macro default__empty_string_to_null(field) -%}
    nullif({{ field }}, '')
{%- endmacro %}
|
|
@ -0,0 +1,4 @@
|
|||
-- see https://docs.getdbt.com/docs/building-a-dbt-project/building-models/using-custom-schemas/#an-alternative-pattern-for-generating-schema-names
{# Override dbt's default schema naming: in non-prod targets use
   <target_schema>_<custom_schema> so dev runs never write into prod schemas. #}
{% macro generate_schema_name(custom_schema_name, node) -%}
    {{ generate_schema_name_for_env(custom_schema_name, node) }}
{%- endmacro %}
|
|
@ -0,0 +1,51 @@
|
|||
{#
    These macros control how incremental models are updated in Airbyte's normalization step
    - get_max_normalized_cursor retrieves the value of the last normalized data
    - incremental_clause controls the predicate to filter on new data to process incrementally
#}

{# Emit an `and ...` predicate selecting only rows newer than the target's max cursor. #}
{% macro incremental_clause(col_emitted_at) -%}
    {{ adapter.dispatch('incremental_clause')(col_emitted_at) }}
{%- endmacro %}

{# Default: compare against a scalar subquery on the existing table.
   Renders nothing on a full refresh / first run (is_incremental() false). #}
{%- macro default__incremental_clause(col_emitted_at) -%}
{% if is_incremental() %}
and coalesce(
    cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }}) >= (select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ this }}),
    {# -- if {{ col_emitted_at }} is NULL in either table, the previous comparison would evaluate to NULL, #}
    {# -- so we coalesce and make sure the row is always returned for incremental processing instead #}
    true)
{% endif %}
{%- endmacro -%}

{# -- see https://on-systems.tech/113-beware-dbt-incremental-updates-against-snowflake-external-tables/ #}
{# Snowflake: inline the cursor as a literal (fetched at compile time) instead of
   a subquery, to avoid full scans against external tables. Emits nothing when
   the target table is empty (max cursor is NULL/falsy). #}
{%- macro snowflake__incremental_clause(col_emitted_at) -%}
{% if is_incremental() %}
    {% if get_max_normalized_cursor(col_emitted_at) %}
and cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }}) >=
    cast('{{ get_max_normalized_cursor(col_emitted_at) }}' as {{ type_timestamp_with_timezone() }})
    {% endif %}
{% endif %}
{%- endmacro -%}

{# SQL Server: coalesce() on the subquery is avoided; handle the empty-table
   case with an explicit `is null` disjunct instead. #}
{%- macro sqlserver__incremental_clause(col_emitted_at) -%}
{% if is_incremental() %}
and ((select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ this }}) is null
  or cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }}) >=
     (select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ this }}))
{% endif %}
{%- endmacro -%}

{# Return the max cursor value from the existing table (queried at compile time),
   or the INCREMENTAL_CURSOR env var override when it is set. #}
{% macro get_max_normalized_cursor(col_emitted_at) %}
{% if execute and is_incremental() %}
 {% if env_var('INCREMENTAL_CURSOR', 'UNSET') == 'UNSET' %}
     {% set query %}
        select max(cast({{ col_emitted_at }} as {{ type_timestamp_with_timezone() }})) from {{ this }}
     {% endset %}
     {% set max_cursor = run_query(query).columns[0][0] %}
     {% do return(max_cursor) %}
 {% else %}
     {% do return(env_var('INCREMENTAL_CURSOR')) %}
 {% endif %}
{% endif %}
{% endmacro %}
|
|
@ -0,0 +1,34 @@
|
|||
{# Oracle override of dbt_utils.equal_rowcount: the test passes when
   abs(count(model) - count(compare_model)) is 0. #}
{% macro oracle__test_equal_rowcount(model, compare_model) %}

{#-- Needs to be set at parse time, before we return '' below --#}
{{ config(fail_calc = 'coalesce(diff_count, 0)') }}

{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}
{%- if not execute -%}
    {{ return('') }}
{% endif %}

{# Count both relations, then surface the absolute difference as diff_count. #}
with a as (

    select count(*) as count_a from {{ model }}

),
b as (

    select count(*) as count_b from {{ compare_model }}

),
final as (

    select
        count_a,
        count_b,
        abs(count_a - count_b) as diff_count
    from a
    cross join b

)

select diff_count from final

{% endmacro %}
|
|
@ -0,0 +1,107 @@
|
|||
{#
-- Adapted from https://github.com/dbt-labs/dbt-utils/blob/0-19-0-updates/macros/schema_tests/equality.sql
-- dbt-utils version: 0.6.4
-- This macro needs to be updated accordingly when dbt-utils is upgraded.
-- This is needed because MySQL does not support the EXCEPT operator!
#}

{# MySQL equality test: emulate EXCEPT with NOT IN on row tuples.
   NOTE(review): NOT IN over a tuple returns no rows when any compared column is
   NULL, so rows containing NULLs are silently treated as matching — inherited
   limitation of this rewrite. The final count adds the symmetric-difference row
   count to abs(|a-b| - |b-a|) so duplicate-multiplicity skews also fail the test. #}
{% macro mysql__test_equality(model, compare_model, compare_columns=None) %}

{#-- Prevent querying of db in parsing mode. #}
{%- if not execute -%}
    {{ return('') }}
{% endif %}

{%- do dbt_utils._is_relation(model, 'test_equality') -%}

{# Default to comparing every column of the model when none are given. #}
{%- if not compare_columns -%}
    {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}
    {%- set compare_columns = adapter.get_columns_in_relation(model) | map(attribute='quoted') -%}
{%- endif -%}

{% set compare_cols_csv = compare_columns | join(', ') %}

with a as (
    select * from {{ model }}
),

b as (
    select * from {{ compare_model }}
),

a_minus_b as (
    select {{ compare_cols_csv }} from a
    where ({{ compare_cols_csv }}) not in
        (select {{ compare_cols_csv }} from b)
),

b_minus_a as (
    select {{ compare_cols_csv }} from b
    where ({{ compare_cols_csv }}) not in
        (select {{ compare_cols_csv }} from a)
),

unioned as (
    select * from a_minus_b
    union all
    select * from b_minus_a
),

final as (
    select (select count(*) from unioned) +
        (select abs(
            (select count(*) from a_minus_b) -
            (select count(*) from b_minus_a)
            ))
        as count
)

select count from final

{% endmacro %}

{# Oracle equality test: same shape as dbt_utils.equality but using the
   dialect-aware dbt_utils.except() (MINUS on Oracle). Fails when the symmetric
   difference of the two relations is non-empty. #}
{% macro oracle__test_equality(model) %}
{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}
{%- if not execute -%}
    {{ return('') }}
{% endif %}

-- setup
{%- do dbt_utils._is_relation(model, 'test_equality') -%}

{#-
If the compare_cols arg is provided, we can run this test without querying the
information schema — this allows the model to be an ephemeral model
-#}
{%- set compare_columns = kwargs.get('compare_columns', None) -%}

{%- if not compare_columns -%}
    {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}
    {%- set compare_columns = adapter.get_columns_in_relation(model) | map(attribute='quoted') -%}
{%- endif -%}

{# 'arg' is the legacy positional name for compare_model in older dbt test syntax. #}
{% set compare_model = kwargs.get('compare_model', kwargs.get('arg')) %}
{% set compare_cols_csv = compare_columns | join(', ') %}

with a as (
    select * from {{ model }}
),
b as (
    select * from {{ compare_model }}
),
a_minus_b as (
    select {{compare_cols_csv}} from a
    {{ dbt_utils.except() }}
    select {{compare_cols_csv}} from b
),
b_minus_a as (
    select {{compare_cols_csv}} from b
    {{ dbt_utils.except() }}
    select {{compare_cols_csv}} from a
),
unioned as (
    select * from a_minus_b
    union all
    select * from b_minus_a
)
select count(*) from unioned
{% endmacro %}
|
|
@ -0,0 +1,51 @@
|
|||
{#
This overrides the behavior of the macro `should_full_refresh` so full refresh are triggered if:
- the dbt cli is run with --full-refresh flag or the model is configured explicitly to full_refresh
- the column _airbyte_ab_id does not exist in the normalized tables, to make sure it is well populated.
#}

{# True when col_ab_id is missing from target_table (i.e. the table predates the
   _airbyte_ab_id column and must be rebuilt). Always false at parse time. #}
{%- macro need_full_refresh(col_ab_id, target_table=this) -%}
    {%- if not execute -%}
        {{ return(false) }}
    {%- endif -%}
    {# A list is used as a mutable flag because Jinja set-inside-for does not
       leak assignments out of the loop scope. #}
    {%- set found_column = [] %}
    {%- set cols = adapter.get_columns_in_relation(target_table) -%}
    {%- for col in cols -%}
        {%- if col.column == col_ab_id -%}
            {% do found_column.append(col.column) %}
        {%- endif -%}
    {%- endfor -%}
    {%- if found_column -%}
        {{ return(false) }}
    {%- else -%}
        {{ dbt_utils.log_info(target_table ~ "." ~ col_ab_id ~ " does not exist yet. The table will be created or rebuilt with dbt.full_refresh") }}
        {{ return(true) }}
    {%- endif -%}
{%- endmacro -%}

{# Precedence: model config > --full-refresh flag > missing-column detection. #}
{%- macro should_full_refresh() -%}
  {% set config_full_refresh = config.get('full_refresh') %}
  {%- if config_full_refresh is none -%}
    {% set config_full_refresh = flags.FULL_REFRESH %}
  {%- endif -%}
  {%- if not config_full_refresh -%}
    {% set config_full_refresh = need_full_refresh(get_col_ab_id(), this) %}
  {%- endif -%}
  {% do return(config_full_refresh) %}
{%- endmacro -%}

{# Adapter-aware name of the Airbyte record-id column as it appears in the catalog. #}
{%- macro get_col_ab_id() -%}
    {{ adapter.dispatch('get_col_ab_id')() }}
{%- endmacro -%}

{%- macro default__get_col_ab_id() -%}
    _airbyte_ab_id
{%- endmacro -%}

{# Oracle reports identifiers upper-cased and quoted. #}
{%- macro oracle__get_col_ab_id() -%}
    "_AIRBYTE_AB_ID"
{%- endmacro -%}

{# Snowflake folds unquoted identifiers to upper case. #}
{%- macro snowflake__get_col_ab_id() -%}
    _AIRBYTE_AB_ID
{%- endmacro -%}
|
|
@ -0,0 +1,46 @@
|
|||
{#
Similar to the star macro here: https://github.com/dbt-labs/dbt-utils/blob/main/macros/sql/star.sql

This star_intersect macro takes an additional 'intersect' relation as argument.
Its behavior is to select columns from both 'intersect' and 'from' relations with the following rules:
- if the columns are existing in both 'from' and the 'intersect' relations, then the column from 'intersect' is used
- if it's not in the both relation, then only the column in the 'from' relation is used
#}
{% macro star_intersect(from, intersect, from_alias=False, intersect_alias=False, except=[]) -%}
    {%- do dbt_utils._is_relation(from, 'star_intersect') -%}
    {%- do dbt_utils._is_ephemeral(from, 'star_intersect') -%}
    {%- do dbt_utils._is_relation(intersect, 'star_intersect') -%}
    {%- do dbt_utils._is_ephemeral(intersect, 'star_intersect') -%}

    {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}
    {%- if not execute -%}
        {{ return('') }}
    {% endif %}

    {# Columns of 'from', minus the (case-insensitive) exclusion list. #}
    {%- set include_cols = [] %}
    {%- set cols = adapter.get_columns_in_relation(from) -%}
    {%- set except = except | map("lower") | list %}
    {%- for col in cols -%}
        {%- if col.column|lower not in except -%}
            {% do include_cols.append(col.column) %}
        {%- endif %}
    {%- endfor %}

    {# Columns of 'intersect', minus the same exclusions. #}
    {%- set include_intersect_cols = [] %}
    {%- set intersect_cols = adapter.get_columns_in_relation(intersect) -%}
    {%- for col in intersect_cols -%}
        {%- if col.column|lower not in except -%}
            {% do include_intersect_cols.append(col.column) %}
        {%- endif %}
    {%- endfor %}

    {# Shared columns come from 'intersect' (optionally alias-qualified, no AS);
       columns only in 'from' are qualified and re-aliased to their own name.
       NOTE(review): membership test here is case-sensitive, unlike the except
       filtering above — same-name columns differing in case would not be
       treated as shared; confirm intended. #}
    {%- for col in include_cols %}
        {%- if col in include_intersect_cols -%}
            {%- if intersect_alias %}{{ intersect_alias }}.{% else %}{%- endif -%}{{ adapter.quote(col)|trim }}
            {%- if not loop.last %},{{ '\n    ' }}{% endif %}
        {%- else %}
            {%- if from_alias %}{{ from_alias }}.{% else %}{{ from }}.{%- endif -%}{{ adapter.quote(col)|trim }} as {{ adapter.quote(col)|trim }}
            {%- if not loop.last %},{{ '\n    ' }}{% endif %}
        {%- endif %}
    {%- endfor -%}
{%- endmacro %}
|
|
@ -0,0 +1,24 @@
|
|||
{# Airbyte normalization step 1/3 for admin_tags: extract raw JSON fields into columns. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_admin_tags') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {# name/type are quoted: both collide with SQL keywords/identifiers. #}
    {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
    {{ json_extract_scalar('_airbyte_data', ['type'], ['type']) }} as {{ adapter.quote('type') }},
    {{ json_extract_scalar('_airbyte_data', ['color'], ['color']) }} as color,
    {{ json_extract_scalar('_airbyte_data', ['active'], ['active']) }} as active,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_admin_tags') }} as table_alias
-- admin_tags
where 1 = 1
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
{# Airbyte normalization step 2/3 for admin_tags: cast extracted strings to typed columns. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
-- depends_on: {{ ref('admin_tags_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
    cast({{ adapter.quote('type') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('type') }},
    cast(color as {{ dbt_utils.type_string() }}) as color,
    {# booleans go through cast_to_boolean: plain cast fails on some adapters. #}
    {{ cast_to_boolean('active') }} as active,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('admin_tags_ab1') }}
-- admin_tags
where 1 = 1
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
{# Airbyte normalization step 3/3 for admin_tags: add a surrogate hash of all record values. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to build a hash column based on the values of this record
-- depends_on: {{ ref('admin_tags_ab2') }}
select
    {# boolean_to_string keeps the hash input stringly-typed on all adapters. #}
    {{ dbt_utils.surrogate_key([
        '_id',
        adapter.quote('name'),
        adapter.quote('type'),
        'color',
        boolean_to_string('active'),
        'createdat',
        'updatedat',
        'searchstring',
    ]) }} as _airbyte_admin_tags_hashid,
    tmp.*
from {{ ref('admin_tags_ab2') }} tmp
-- admin_tags
where 1 = 1
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
{# Airbyte normalization step 1/3 for cash_flows: extract raw JSON fields into columns. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_cash_flows') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['hash'], ['hash']) }} as hash,
    {# reserved-word-ish column names are quoted via the adapter. #}
    {{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
    {{ json_extract_scalar('_airbyte_data', ['value'], ['value']) }} as {{ adapter.quote('value') }},
    {{ json_extract_scalar('_airbyte_data', ['action'], ['action']) }} as {{ adapter.quote('action') }},
    {{ json_extract_scalar('_airbyte_data', ['options'], ['options']) }} as {{ adapter.quote('options') }},
    {{ json_extract_scalar('_airbyte_data', ['category'], ['category']) }} as category,
    {{ json_extract_scalar('_airbyte_data', ['targetId'], ['targetId']) }} as targetid,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['isAudited'], ['isAudited']) }} as isaudited,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['newBalance'], ['newBalance']) }} as newbalance,
    {{ json_extract_scalar('_airbyte_data', ['oldBalance'], ['oldBalance']) }} as oldbalance,
    {{ json_extract_scalar('_airbyte_data', ['targetType'], ['targetType']) }} as targettype,
    {{ json_extract_scalar('_airbyte_data', ['isProcessed'], ['isProcessed']) }} as isprocessed,
    {{ json_extract_scalar('_airbyte_data', ['processStatus'], ['processStatus']) }} as processstatus,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_cash_flows') }} as table_alias
-- cash_flows
where 1 = 1
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
{# Airbyte normalization step 2/3 for cash_flows: cast extracted strings to typed columns. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
-- depends_on: {{ ref('cash_flows_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(hash as {{ dbt_utils.type_string() }}) as hash,
    cast({{ adapter.quote('user') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('user') }},
    {# JSON "number" maps to float here — monetary precision follows the source schema. #}
    cast({{ adapter.quote('value') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('value') }},
    cast({{ adapter.quote('action') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('action') }},
    cast({{ adapter.quote('options') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('options') }},
    cast(category as {{ dbt_utils.type_string() }}) as category,
    cast(targetid as {{ dbt_utils.type_string() }}) as targetid,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    {{ cast_to_boolean('isaudited') }} as isaudited,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(newbalance as {{ dbt_utils.type_float() }}) as newbalance,
    cast(oldbalance as {{ dbt_utils.type_float() }}) as oldbalance,
    cast(targettype as {{ dbt_utils.type_string() }}) as targettype,
    {{ cast_to_boolean('isprocessed') }} as isprocessed,
    cast(processstatus as {{ dbt_utils.type_string() }}) as processstatus,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('cash_flows_ab1') }}
-- cash_flows
where 1 = 1
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
{# Airbyte normalization step 3/3 for cash_flows: add a surrogate hash of all record values. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to build a hash column based on the values of this record
-- depends_on: {{ ref('cash_flows_ab2') }}
select
    {# boolean columns are stringified so the hash input is uniform across adapters. #}
    {{ dbt_utils.surrogate_key([
        '_id',
        'hash',
        adapter.quote('user'),
        adapter.quote('value'),
        adapter.quote('action'),
        adapter.quote('options'),
        'category',
        'targetid',
        'createdat',
        boolean_to_string('isaudited'),
        'updatedat',
        'newbalance',
        'oldbalance',
        'targettype',
        boolean_to_string('isprocessed'),
        'processstatus',
    ]) }} as _airbyte_cash_flows_hashid,
    tmp.*
from {{ ref('cash_flows_ab2') }} tmp
-- cash_flows
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{# Airbyte normalization step 1/3 for cities: extract raw JSON fields into columns. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_cities') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['code'], ['code']) }} as code,
    {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
    {{ json_extract_scalar('_airbyte_data', ['slug'], ['slug']) }} as slug,
    {# 'order' is a reserved SQL keyword, hence the adapter quoting. #}
    {{ json_extract_scalar('_airbyte_data', ['order'], ['order']) }} as {{ adapter.quote('order') }},
    {{ json_extract_scalar('_airbyte_data', ['tncId'], ['tncId']) }} as tncid,
    {{ json_extract_scalar('_airbyte_data', ['region'], ['region']) }} as region,
    {{ json_extract_scalar('_airbyte_data', ['source'], ['source']) }} as {{ adapter.quote('source') }},
    {{ json_extract_scalar('_airbyte_data', ['tempId'], ['tempId']) }} as tempid,
    {{ json_extract_scalar('_airbyte_data', ['tncCode'], ['tncCode']) }} as tnccode,
    {{ json_extract_scalar('_airbyte_data', ['osirisId'], ['osirisId']) }} as osirisid,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_cities') }} as table_alias
-- cities
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{# Airbyte normalization step 2/3 for cities: cast extracted strings to typed columns. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
-- depends_on: {{ ref('cities_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    {# numeric id-like fields map to float per the source JSON schema ("number"). #}
    cast(code as {{ dbt_utils.type_float() }}) as code,
    cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
    cast(slug as {{ dbt_utils.type_string() }}) as slug,
    cast({{ adapter.quote('order') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('order') }},
    cast(tncid as {{ dbt_utils.type_float() }}) as tncid,
    cast(region as {{ dbt_utils.type_string() }}) as region,
    cast({{ adapter.quote('source') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('source') }},
    cast(tempid as {{ dbt_utils.type_float() }}) as tempid,
    cast(tnccode as {{ dbt_utils.type_string() }}) as tnccode,
    cast(osirisid as {{ dbt_utils.type_float() }}) as osirisid,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('cities_ab1') }}
-- cities
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{# Airbyte normalization step 3/3 for cities: add a surrogate hash of all record values. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to build a hash column based on the values of this record
-- depends_on: {{ ref('cities_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'code',
        adapter.quote('name'),
        'slug',
        adapter.quote('order'),
        'tncid',
        'region',
        adapter.quote('source'),
        'tempid',
        'tnccode',
        'osirisid',
        'updatedat',
        'searchstring',
    ]) }} as _airbyte_cities_hashid,
    tmp.*
from {{ ref('cities_ab2') }} tmp
-- cities
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{# Airbyte normalization step 1/3 for customers: extract raw JSON fields into columns. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_customers') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['info'], ['info']) }} as info,
    {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
    {{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
    {{ json_extract_scalar('_airbyte_data', ['email'], ['email']) }} as email,
    {{ json_extract_scalar('_airbyte_data', ['phone'], ['phone']) }} as phone,
    {{ json_extract_scalar('_airbyte_data', ['source'], ['source']) }} as {{ adapter.quote('source') }},
    {# location is a JSON array, so it goes through json_extract_array, not _scalar. #}
    {{ json_extract_array('_airbyte_data', ['location'], ['location']) }} as {{ adapter.quote('location') }},
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['isDeleted'], ['isDeleted']) }} as isdeleted,
    {{ json_extract_scalar('_airbyte_data', ['statistic'], ['statistic']) }} as statistic,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_customers') }} as table_alias
-- customers
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{# Airbyte normalization step 2/3 for customers: cast extracted strings to typed columns. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
-- depends_on: {{ ref('customers_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(info as {{ dbt_utils.type_string() }}) as info,
    cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
    cast({{ adapter.quote('user') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('user') }},
    cast(email as {{ dbt_utils.type_string() }}) as email,
    cast(phone as {{ dbt_utils.type_string() }}) as phone,
    cast({{ adapter.quote('source') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('source') }},
    {# location is an array column: passed through uncast. #}
    {{ adapter.quote('location') }},
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    {{ cast_to_boolean('isdeleted') }} as isdeleted,
    cast(statistic as {{ dbt_utils.type_string() }}) as statistic,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('customers_ab1') }}
-- customers
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{# Airbyte normalization step 3/3 for customers: add a surrogate hash of all record values. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to build a hash column based on the values of this record
-- depends_on: {{ ref('customers_ab2') }}
select
    {# arrays and booleans are stringified so the hash input is uniform across adapters. #}
    {{ dbt_utils.surrogate_key([
        '_id',
        'info',
        adapter.quote('name'),
        adapter.quote('user'),
        'email',
        'phone',
        adapter.quote('source'),
        array_to_string(adapter.quote('location')),
        'createdat',
        boolean_to_string('isdeleted'),
        'statistic',
        'updatedat',
        'searchstring',
    ]) }} as _airbyte_customers_hashid,
    tmp.*
from {{ ref('customers_ab2') }} tmp
-- customers
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{# Airbyte normalization step 1/3 for districts: extract raw JSON fields into columns. #}
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_districts') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['city'], ['city']) }} as city,
    {{ json_extract_scalar('_airbyte_data', ['code'], ['code']) }} as code,
    {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
    {{ json_extract_scalar('_airbyte_data', ['slug'], ['slug']) }} as slug,
    {{ json_extract_scalar('_airbyte_data', ['tncId'], ['tncId']) }} as tncid,
    {{ json_extract_scalar('_airbyte_data', ['cityId'], ['cityId']) }} as cityid,
    {{ json_extract_scalar('_airbyte_data', ['source'], ['source']) }} as {{ adapter.quote('source') }},
    {{ json_extract_scalar('_airbyte_data', ['tncCode'], ['tncCode']) }} as tnccode,
    {{ json_extract_scalar('_airbyte_data', ['gidoCode'], ['gidoCode']) }} as gidocode,
    {{ json_extract_scalar('_airbyte_data', ['osirisId'], ['osirisId']) }} as osirisid,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_districts') }} as table_alias
-- districts
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
|
||||
-- depends_on: {{ ref('districts_ab1') }}
|
||||
select
|
||||
cast(_id as {{ dbt_utils.type_string() }}) as _id,
|
||||
cast(city as {{ dbt_utils.type_string() }}) as city,
|
||||
cast(code as {{ dbt_utils.type_float() }}) as code,
|
||||
cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
|
||||
cast(slug as {{ dbt_utils.type_string() }}) as slug,
|
||||
cast(tncid as {{ dbt_utils.type_float() }}) as tncid,
|
||||
cast(cityid as {{ dbt_utils.type_float() }}) as cityid,
|
||||
cast({{ adapter.quote('source') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('source') }},
|
||||
cast(tnccode as {{ dbt_utils.type_string() }}) as tnccode,
|
||||
cast(gidocode as {{ dbt_utils.type_float() }}) as gidocode,
|
||||
cast(osirisid as {{ dbt_utils.type_float() }}) as osirisid,
|
||||
cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
|
||||
cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ ref('districts_ab1') }}
|
||||
-- districts
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to build a hash column based on the values of this record
|
||||
-- depends_on: {{ ref('districts_ab2') }}
|
||||
select
|
||||
{{ dbt_utils.surrogate_key([
|
||||
'_id',
|
||||
'city',
|
||||
'code',
|
||||
adapter.quote('name'),
|
||||
'slug',
|
||||
'tncid',
|
||||
'cityid',
|
||||
adapter.quote('source'),
|
||||
'tnccode',
|
||||
'gidocode',
|
||||
'osirisid',
|
||||
'updatedat',
|
||||
'searchstring',
|
||||
]) }} as _airbyte_districts_hashid,
|
||||
tmp.*
|
||||
from {{ ref('districts_ab2') }} tmp
|
||||
-- districts
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
|
||||
-- depends_on: {{ source('unibag', '_airbyte_raw_event_rewards') }}
|
||||
select
|
||||
{{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
|
||||
{{ json_extract_scalar('_airbyte_data', ['cash'], ['cash']) }} as cash,
|
||||
{{ json_extract_scalar('_airbyte_data', ['type'], ['type']) }} as {{ adapter.quote('type') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['event'], ['event']) }} as {{ adapter.quote('event') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['status'], ['status']) }} as status,
|
||||
{{ json_extract_scalar('_airbyte_data', ['options'], ['options']) }} as {{ adapter.quote('options') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['updatedat'], ['updatedat']) }} as updatedat,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ source('unibag', '_airbyte_raw_event_rewards') }} as table_alias
|
||||
-- event_rewards
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
|
||||
-- depends_on: {{ ref('event_rewards_ab1') }}
|
||||
select
|
||||
cast(_id as {{ dbt_utils.type_string() }}) as _id,
|
||||
cast(cash as {{ dbt_utils.type_float() }}) as cash,
|
||||
cast({{ adapter.quote('type') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('type') }},
|
||||
cast({{ adapter.quote('user') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('user') }},
|
||||
cast({{ adapter.quote('event') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('event') }},
|
||||
cast(status as {{ dbt_utils.type_string() }}) as status,
|
||||
cast({{ adapter.quote('options') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('options') }},
|
||||
cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
|
||||
cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ ref('event_rewards_ab1') }}
|
||||
-- event_rewards
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to build a hash column based on the values of this record
|
||||
-- depends_on: {{ ref('event_rewards_ab2') }}
|
||||
select
|
||||
{{ dbt_utils.surrogate_key([
|
||||
'_id',
|
||||
'cash',
|
||||
adapter.quote('type'),
|
||||
adapter.quote('user'),
|
||||
adapter.quote('event'),
|
||||
'status',
|
||||
adapter.quote('options'),
|
||||
'createdat',
|
||||
'updatedat',
|
||||
]) }} as _airbyte_event_rewards_hashid,
|
||||
tmp.*
|
||||
from {{ ref('event_rewards_ab2') }} tmp
|
||||
-- event_rewards
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
|
||||
-- depends_on: {{ source('unibag', '_airbyte_raw_events') }}
|
||||
select
|
||||
{{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
|
||||
{{ json_extract_scalar('_airbyte_data', ['desc'], ['desc']) }} as {{ adapter.quote('desc') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['icon'], ['icon']) }} as icon,
|
||||
{{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['type'], ['type']) }} as {{ adapter.quote('type') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['cover'], ['cover']) }} as cover,
|
||||
{{ json_extract_scalar('_airbyte_data', ['endAt'], ['endAt']) }} as endat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['active'], ['active']) }} as active,
|
||||
{{ json_extract_scalar('_airbyte_data', ['notice'], ['notice']) }} as notice,
|
||||
{{ json_extract_scalar('_airbyte_data', ['screen'], ['screen']) }} as screen,
|
||||
{{ json_extract_scalar('_airbyte_data', ['article'], ['article']) }} as article,
|
||||
{{ json_extract_scalar('_airbyte_data', ['options'], ['options']) }} as {{ adapter.quote('options') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['startAt'], ['startAt']) }} as startat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['segments'], ['segments']) }} as segments,
|
||||
{{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['groupType'], ['groupType']) }} as grouptype,
|
||||
{{ json_extract_array('_airbyte_data', ['suppliers'], ['suppliers']) }} as suppliers,
|
||||
{{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
|
||||
{{ json_extract_array('_airbyte_data', ['information'], ['information']) }} as information,
|
||||
{{ json_extract_scalar('_airbyte_data', ['displayEndAt'], ['displayEndAt']) }} as displayendat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['isApplyForAll'], ['isApplyForAll']) }} as isapplyforall,
|
||||
{{ json_extract_scalar('_airbyte_data', ['displayStartAt'], ['displayStartAt']) }} as displaystartat,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ source('unibag', '_airbyte_raw_events') }} as table_alias
|
||||
-- events
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
|
||||
-- depends_on: {{ ref('events_ab1') }}
|
||||
select
|
||||
cast(_id as {{ dbt_utils.type_string() }}) as _id,
|
||||
cast({{ adapter.quote('desc') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('desc') }},
|
||||
cast(icon as {{ dbt_utils.type_string() }}) as icon,
|
||||
cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
|
||||
cast({{ adapter.quote('type') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('type') }},
|
||||
cast(cover as {{ dbt_utils.type_string() }}) as cover,
|
||||
cast(endat as {{ dbt_utils.type_string() }}) as endat,
|
||||
{{ cast_to_boolean('active') }} as active,
|
||||
cast(notice as {{ dbt_utils.type_string() }}) as notice,
|
||||
cast(screen as {{ dbt_utils.type_string() }}) as screen,
|
||||
cast(article as {{ dbt_utils.type_string() }}) as article,
|
||||
cast({{ adapter.quote('options') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('options') }},
|
||||
cast(startat as {{ dbt_utils.type_string() }}) as startat,
|
||||
cast(segments as {{ dbt_utils.type_string() }}) as segments,
|
||||
cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
|
||||
cast(grouptype as {{ dbt_utils.type_string() }}) as grouptype,
|
||||
suppliers,
|
||||
cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
|
||||
information,
|
||||
cast(displayendat as {{ dbt_utils.type_string() }}) as displayendat,
|
||||
{{ cast_to_boolean('isapplyforall') }} as isapplyforall,
|
||||
cast(displaystartat as {{ dbt_utils.type_string() }}) as displaystartat,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ ref('events_ab1') }}
|
||||
-- events
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to build a hash column based on the values of this record
|
||||
-- depends_on: {{ ref('events_ab2') }}
|
||||
select
|
||||
{{ dbt_utils.surrogate_key([
|
||||
'_id',
|
||||
adapter.quote('desc'),
|
||||
'icon',
|
||||
adapter.quote('name'),
|
||||
adapter.quote('type'),
|
||||
'cover',
|
||||
'endat',
|
||||
boolean_to_string('active'),
|
||||
'notice',
|
||||
'screen',
|
||||
'article',
|
||||
adapter.quote('options'),
|
||||
'startat',
|
||||
'segments',
|
||||
'createdat',
|
||||
'grouptype',
|
||||
array_to_string('suppliers'),
|
||||
'updatedat',
|
||||
array_to_string('information'),
|
||||
'displayendat',
|
||||
boolean_to_string('isapplyforall'),
|
||||
'displaystartat',
|
||||
]) }} as _airbyte_events_hashid,
|
||||
tmp.*
|
||||
from {{ ref('events_ab2') }} tmp
|
||||
-- events
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
|
||||
-- depends_on: {{ source('unibag', '_airbyte_raw_inventories') }}
|
||||
select
|
||||
{{ json_extract_scalar('_airbyte_data', ['id'], ['id']) }} as {{ adapter.quote('id') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
|
||||
{{ json_extract_scalar('_airbyte_data', ['code'], ['code']) }} as code,
|
||||
{{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['slug'], ['slug']) }} as slug,
|
||||
{{ json_extract_scalar('_airbyte_data', ['active'], ['active']) }} as active,
|
||||
{{ json_extract_scalar('_airbyte_data', ['contact'], ['contact']) }} as contact,
|
||||
{{ json_extract_scalar('_airbyte_data', ['partner'], ['partner']) }} as partner,
|
||||
{{ json_extract_scalar('_airbyte_data', ['checksum'], ['checksum']) }} as checksum,
|
||||
{{ json_extract_scalar('_airbyte_data', ['location'], ['location']) }} as {{ adapter.quote('location') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['supplier'], ['supplier']) }} as supplier,
|
||||
{{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['minimumValue'], ['minimumValue']) }} as minimumvalue,
|
||||
{{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
|
||||
{{ json_extract_scalar('_airbyte_data', ['paymentMethods'], ['paymentMethods']) }} as paymentmethods,
|
||||
{{ json_extract_scalar('_airbyte_data', ['canIssueInvoice'], ['canIssueInvoice']) }} as canissueinvoice,
|
||||
{{ json_extract_array('_airbyte_data', ['deliveryMethods'], ['deliveryMethods']) }} as deliverymethods,
|
||||
{{ json_extract_scalar('_airbyte_data', ['canAutoSendEmail'], ['canAutoSendEmail']) }} as canautosendemail,
|
||||
{{ json_extract_scalar('_airbyte_data', ['invoiceDeliveryMethod'], ['invoiceDeliveryMethod']) }} as invoicedeliverymethod,
|
||||
{{ json_extract_scalar('_airbyte_data', ['doesSupportSellyExpress'], ['doesSupportSellyExpress']) }} as doessupportsellyexpress,
|
||||
{{ json_extract_array('_airbyte_data', ['priorityDeliveryServiceCodes'], ['priorityDeliveryServiceCodes']) }} as prioritydeliveryservicecodes,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ source('unibag', '_airbyte_raw_inventories') }} as table_alias
|
||||
-- inventories
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
|
||||
-- depends_on: {{ ref('inventories_ab1') }}
|
||||
select
|
||||
cast({{ adapter.quote('id') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('id') }},
|
||||
cast(_id as {{ dbt_utils.type_string() }}) as _id,
|
||||
cast(code as {{ dbt_utils.type_float() }}) as code,
|
||||
cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
|
||||
cast(slug as {{ dbt_utils.type_string() }}) as slug,
|
||||
{{ cast_to_boolean('active') }} as active,
|
||||
cast(contact as {{ dbt_utils.type_string() }}) as contact,
|
||||
cast(partner as {{ dbt_utils.type_string() }}) as partner,
|
||||
cast(checksum as {{ dbt_utils.type_string() }}) as checksum,
|
||||
cast({{ adapter.quote('location') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('location') }},
|
||||
cast(supplier as {{ dbt_utils.type_string() }}) as supplier,
|
||||
cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
|
||||
cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
|
||||
cast(minimumvalue as {{ dbt_utils.type_float() }}) as minimumvalue,
|
||||
cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
|
||||
cast(paymentmethods as {{ dbt_utils.type_string() }}) as paymentmethods,
|
||||
{{ cast_to_boolean('canissueinvoice') }} as canissueinvoice,
|
||||
deliverymethods,
|
||||
{{ cast_to_boolean('canautosendemail') }} as canautosendemail,
|
||||
cast(invoicedeliverymethod as {{ dbt_utils.type_string() }}) as invoicedeliverymethod,
|
||||
{{ cast_to_boolean('doessupportsellyexpress') }} as doessupportsellyexpress,
|
||||
prioritydeliveryservicecodes,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ ref('inventories_ab1') }}
|
||||
-- inventories
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to build a hash column based on the values of this record
|
||||
-- depends_on: {{ ref('inventories_ab2') }}
|
||||
select
|
||||
{{ dbt_utils.surrogate_key([
|
||||
adapter.quote('id'),
|
||||
'_id',
|
||||
'code',
|
||||
adapter.quote('name'),
|
||||
'slug',
|
||||
boolean_to_string('active'),
|
||||
'contact',
|
||||
'partner',
|
||||
'checksum',
|
||||
adapter.quote('location'),
|
||||
'supplier',
|
||||
'createdat',
|
||||
'updatedat',
|
||||
'minimumvalue',
|
||||
'searchstring',
|
||||
'paymentmethods',
|
||||
boolean_to_string('canissueinvoice'),
|
||||
array_to_string('deliverymethods'),
|
||||
boolean_to_string('canautosendemail'),
|
||||
'invoicedeliverymethod',
|
||||
boolean_to_string('doessupportsellyexpress'),
|
||||
array_to_string('prioritydeliveryservicecodes'),
|
||||
]) }} as _airbyte_inventories_hashid,
|
||||
tmp.*
|
||||
from {{ ref('inventories_ab2') }} tmp
|
||||
-- inventories
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
|
||||
-- depends_on: {{ source('unibag', '_airbyte_raw_membership') }}
|
||||
select
|
||||
{{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
|
||||
{{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['color'], ['color']) }} as color,
|
||||
{{ json_extract_scalar('_airbyte_data', ['guide'], ['guide']) }} as guide,
|
||||
{{ json_extract_scalar('_airbyte_data', ['level'], ['level']) }} as {{ adapter.quote('level') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['sales'], ['sales']) }} as sales,
|
||||
{{ json_extract_scalar('_airbyte_data', ['active'], ['active']) }} as active,
|
||||
{{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['updatedat'], ['updatedat']) }} as updatedat_1,
|
||||
{{ json_extract_scalar('_airbyte_data', ['transaction'], ['transaction']) }} as {{ adapter.quote('transaction') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['bonusPercent'], ['bonusPercent']) }} as bonuspercent,
|
||||
{{ json_extract_scalar('_airbyte_data', ['bonuspercent'], ['bonuspercent']) }} as bonuspercent_1,
|
||||
{{ json_extract_scalar('_airbyte_data', ['transactionMinValue'], ['transactionMinValue']) }} as transactionminvalue,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ source('unibag', '_airbyte_raw_membership') }} as table_alias
|
||||
-- membership
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
|
||||
-- depends_on: {{ ref('membership_ab1') }}
|
||||
select
|
||||
cast(_id as {{ dbt_utils.type_string() }}) as _id,
|
||||
cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
|
||||
cast(color as {{ dbt_utils.type_string() }}) as color,
|
||||
cast(guide as {{ dbt_utils.type_string() }}) as guide,
|
||||
cast({{ adapter.quote('level') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('level') }},
|
||||
cast(sales as {{ dbt_utils.type_float() }}) as sales,
|
||||
{{ cast_to_boolean('active') }} as active,
|
||||
cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
|
||||
cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
|
||||
cast(updatedat_1 as {{ dbt_utils.type_string() }}) as updatedat_1,
|
||||
cast({{ adapter.quote('transaction') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('transaction') }},
|
||||
cast(bonuspercent as {{ dbt_utils.type_float() }}) as bonuspercent,
|
||||
cast(bonuspercent_1 as {{ dbt_utils.type_float() }}) as bonuspercent_1,
|
||||
cast(transactionminvalue as {{ dbt_utils.type_float() }}) as transactionminvalue,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ ref('membership_ab1') }}
|
||||
-- membership
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to build a hash column based on the values of this record
|
||||
-- depends_on: {{ ref('membership_ab2') }}
|
||||
select
|
||||
{{ dbt_utils.surrogate_key([
|
||||
'_id',
|
||||
adapter.quote('name'),
|
||||
'color',
|
||||
'guide',
|
||||
adapter.quote('level'),
|
||||
'sales',
|
||||
boolean_to_string('active'),
|
||||
'createdat',
|
||||
'updatedat',
|
||||
'updatedat_1',
|
||||
adapter.quote('transaction'),
|
||||
'bonuspercent',
|
||||
'bonuspercent_1',
|
||||
'transactionminvalue',
|
||||
]) }} as _airbyte_membership_hashid,
|
||||
tmp.*
|
||||
from {{ ref('membership_ab2') }} tmp
|
||||
-- membership
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
|
||||
-- depends_on: {{ source('unibag', '_airbyte_raw_membership_histories') }}
|
||||
select
|
||||
{{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
|
||||
{{ json_extract_scalar('_airbyte_data', ['new'], ['new']) }} as {{ adapter.quote('new') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['old'], ['old']) }} as {{ adapter.quote('old') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['type'], ['type']) }} as {{ adapter.quote('type') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ source('unibag', '_airbyte_raw_membership_histories') }} as table_alias
|
||||
-- membership_histories
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
|
||||
-- depends_on: {{ ref('membership_histories_ab1') }}
|
||||
select
|
||||
cast(_id as {{ dbt_utils.type_string() }}) as _id,
|
||||
cast({{ adapter.quote('new') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('new') }},
|
||||
cast({{ adapter.quote('old') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('old') }},
|
||||
cast({{ adapter.quote('type') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('type') }},
|
||||
cast({{ adapter.quote('user') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('user') }},
|
||||
cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ ref('membership_histories_ab1') }}
|
||||
-- membership_histories
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to build a hash column based on the values of this record
|
||||
-- depends_on: {{ ref('membership_histories_ab2') }}
|
||||
select
|
||||
{{ dbt_utils.surrogate_key([
|
||||
'_id',
|
||||
adapter.quote('new'),
|
||||
adapter.quote('old'),
|
||||
adapter.quote('type'),
|
||||
adapter.quote('user'),
|
||||
'createdat',
|
||||
]) }} as _airbyte_membership_histories_hashid,
|
||||
tmp.*
|
||||
from {{ ref('membership_histories_ab2') }} tmp
|
||||
-- membership_histories
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
|
||||
-- depends_on: {{ source('unibag', '_airbyte_raw_membership_orders') }}
|
||||
select
|
||||
{{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
|
||||
{{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['status'], ['status']) }} as status,
|
||||
{{ json_extract_scalar('_airbyte_data', ['percent'], ['percent']) }} as {{ adapter.quote('percent') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['targetID'], ['targetID']) }} as targetid,
|
||||
{{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['commission'], ['commission']) }} as commission,
|
||||
{{ json_extract_scalar('_airbyte_data', ['isRejected'], ['isRejected']) }} as isrejected,
|
||||
{{ json_extract_scalar('_airbyte_data', ['membershipName'], ['membershipName']) }} as membershipname,
|
||||
{{ json_extract_scalar('_airbyte_data', ['membershipLevel'], ['membershipLevel']) }} as membershiplevel,
|
||||
{{ json_extract_scalar('_airbyte_data', ['membershipPromotion'], ['membershipPromotion']) }} as membershippromotion,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ source('unibag', '_airbyte_raw_membership_orders') }} as table_alias
|
||||
-- membership_orders
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
|
||||
-- depends_on: {{ ref('membership_orders_ab1') }}
|
||||
select
|
||||
cast(_id as {{ dbt_utils.type_string() }}) as _id,
|
||||
cast({{ adapter.quote('user') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('user') }},
|
||||
cast(status as {{ dbt_utils.type_string() }}) as status,
|
||||
cast({{ adapter.quote('percent') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('percent') }},
|
||||
cast(targetid as {{ dbt_utils.type_string() }}) as targetid,
|
||||
cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
|
||||
cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
|
||||
cast(commission as {{ dbt_utils.type_float() }}) as commission,
|
||||
{{ cast_to_boolean('isrejected') }} as isrejected,
|
||||
cast(membershipname as {{ dbt_utils.type_string() }}) as membershipname,
|
||||
cast(membershiplevel as {{ dbt_utils.type_float() }}) as membershiplevel,
|
||||
cast({{ adapter.quote('membershippromotion') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('membershippromotion') }},
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ ref('membership_orders_ab1') }}
|
||||
-- membership_orders
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to build a hash column based on the values of this record
|
||||
-- depends_on: {{ ref('membership_orders_ab2') }}
|
||||
select
|
||||
{{ dbt_utils.surrogate_key([
|
||||
'_id',
|
||||
adapter.quote('user'),
|
||||
'status',
|
||||
adapter.quote('percent'),
|
||||
'targetid',
|
||||
'createdat',
|
||||
'updatedat',
|
||||
'commission',
|
||||
boolean_to_string('isrejected'),
|
||||
'membershipname',
|
||||
'membershiplevel',
|
||||
'membershippromotion'
|
||||
]) }} as _airbyte_membership_orders_hashid,
|
||||
tmp.*
|
||||
from {{ ref('membership_orders_ab2') }} tmp
|
||||
-- membership_orders
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
{{ config(
|
||||
indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
|
||||
unique_key = '_airbyte_ab_id',
|
||||
schema = "_airbyte_unibag",
|
||||
tags = [ "top-level-intermediate" ]
|
||||
) }}
|
||||
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
|
||||
-- depends_on: {{ source('unibag', '_airbyte_raw_order_deliveries') }}
|
||||
select
|
||||
{{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
|
||||
{{ json_extract_scalar('_airbyte_data', ['code'], ['code']) }} as code,
|
||||
{{ json_extract_scalar('_airbyte_data', ['order'], ['order']) }} as {{ adapter.quote('order') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['title'], ['title']) }} as title,
|
||||
{{ json_extract_scalar('_airbyte_data', ['status'], ['status']) }} as status,
|
||||
{{ json_extract_scalar('_airbyte_data', ['service'], ['service']) }} as service,
|
||||
{{ json_extract_scalar('_airbyte_data', ['customer'], ['customer']) }} as customer,
|
||||
{{ json_extract_scalar('_airbyte_data', ['delivery'], ['delivery']) }} as delivery,
|
||||
{{ json_extract_scalar('_airbyte_data', ['location'], ['location']) }} as {{ adapter.quote('location') }},
|
||||
{{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['promotion'], ['promotion']) }} as promotion,
|
||||
{{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
|
||||
{{ json_extract_scalar('_airbyte_data', ['trackingCode'], ['trackingCode']) }} as trackingcode,
|
||||
{{ json_extract_scalar('_airbyte_data', ['trackingTime'], ['trackingTime']) }} as trackingtime,
|
||||
{{ json_extract_scalar('_airbyte_data', ['isAdminChange'], ['isAdminChange']) }} as isadminchange,
|
||||
{{ json_extract_scalar('_airbyte_data', ['trackingOrderCode'], ['trackingOrderCode']) }} as trackingordercode,
|
||||
_airbyte_ab_id,
|
||||
_airbyte_emitted_at,
|
||||
{{ current_timestamp() }} as _airbyte_normalized_at
|
||||
from {{ source('unibag', '_airbyte_raw_order_deliveries') }} as table_alias
|
||||
-- order_deliveries
|
||||
where 1 = 1
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Casting step for the order_deliveries stream: coerce every column extracted
-- in ab1 to the SQL type implied by the JSON schema (strings throughout,
-- except the isAdminChange boolean flag).
-- depends_on: {{ ref('order_deliveries_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(code as {{ dbt_utils.type_string() }}) as code,
    cast({{ adapter.quote('order') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('order') }},
    cast(title as {{ dbt_utils.type_string() }}) as title,
    cast(status as {{ dbt_utils.type_string() }}) as status,
    cast(service as {{ dbt_utils.type_string() }}) as service,
    cast(customer as {{ dbt_utils.type_string() }}) as customer,
    cast(delivery as {{ dbt_utils.type_string() }}) as delivery,
    cast({{ adapter.quote('location') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('location') }},
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(promotion as {{ dbt_utils.type_string() }}) as promotion,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(trackingcode as {{ dbt_utils.type_string() }}) as trackingcode,
    cast(trackingtime as {{ dbt_utils.type_string() }}) as trackingtime,
    {{ cast_to_boolean('isadminchange') }} as isadminchange,
    cast(trackingordercode as {{ dbt_utils.type_string() }}) as trackingordercode,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('order_deliveries_ab1') }}
-- order_deliveries
where 1 = 1
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Hashing step for the order_deliveries stream: derive a deterministic
-- surrogate key over every business column (booleans stringified first so
-- they hash consistently across adapters).
-- depends_on: {{ ref('order_deliveries_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'code',
        adapter.quote('order'),
        'title',
        'status',
        'service',
        'customer',
        'delivery',
        adapter.quote('location'),
        'createdat',
        'promotion',
        'updatedat',
        'trackingcode',
        'trackingtime',
        boolean_to_string('isadminchange'),
        'trackingordercode',
    ]) }} as _airbyte_order_deliveries_hashid,
    casted.*
from {{ ref('order_deliveries_ab2') }} casted
-- order_deliveries
where 1 = 1
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Extraction step for the order_histories stream: pull each field declared by
-- the JSON schema out of the raw Airbyte blob (_airbyte_data) into its own
-- column; reserved words are quoted via the adapter.
-- depends_on: {{ source('unibag', '_airbyte_raw_order_histories') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['order'], ['order']) }} as {{ adapter.quote('order') }},
    {{ json_extract_scalar('_airbyte_data', ['status'], ['status']) }} as status,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['timestamp'], ['timestamp']) }} as {{ adapter.quote('timestamp') }},
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_order_histories') }} as raw_stream
-- order_histories
where 1 = 1
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Casting step for the order_histories stream: coerce each extracted column
-- to the SQL type implied by the JSON schema (the epoch "timestamp" field is
-- numeric; everything else is a string).
-- depends_on: {{ ref('order_histories_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast({{ adapter.quote('order') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('order') }},
    cast(status as {{ dbt_utils.type_string() }}) as status,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast({{ adapter.quote('timestamp') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('timestamp') }},
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('order_histories_ab1') }}
-- order_histories
where 1 = 1
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Hashing step for the order_histories stream: build a deterministic
-- surrogate key from all business columns of the casted relation.
-- depends_on: {{ ref('order_histories_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        adapter.quote('order'),
        'status',
        'createdat',
        adapter.quote('timestamp'),
        'updatedat',
    ]) }} as _airbyte_order_histories_hashid,
    casted.*
from {{ ref('order_histories_ab2') }} casted
-- order_histories
where 1 = 1
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Extraction step for the order_items stream: pull each field declared by the
-- JSON schema out of the raw Airbyte blob (_airbyte_data) into its own
-- column. "promotions" is an array and is extracted as such; reserved words
-- are quoted via the adapter.
-- depends_on: {{ source('unibag', '_airbyte_raw_order_items') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['sku'], ['sku']) }} as sku,
    {{ json_extract_scalar('_airbyte_data', ['date'], ['date']) }} as {{ adapter.quote('date') }},
    {{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
    {{ json_extract_scalar('_airbyte_data', ['brand'], ['brand']) }} as brand,
    {{ json_extract_scalar('_airbyte_data', ['order'], ['order']) }} as {{ adapter.quote('order') }},
    {{ json_extract_scalar('_airbyte_data', ['price'], ['price']) }} as price,
    {{ json_extract_scalar('_airbyte_data', ['status'], ['status']) }} as status,
    {{ json_extract_scalar('_airbyte_data', ['teamId'], ['teamId']) }} as teamid,
    {{ json_extract_scalar('_airbyte_data', ['product'], ['product']) }} as product,
    {{ json_extract_scalar('_airbyte_data', ['voucher'], ['voucher']) }} as voucher,
    {{ json_extract_scalar('_airbyte_data', ['customer'], ['customer']) }} as customer,
    {{ json_extract_scalar('_airbyte_data', ['quantity'], ['quantity']) }} as quantity,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['inventory'], ['inventory']) }} as inventory,
    {{ json_extract_scalar('_airbyte_data', ['totalSell'], ['totalSell']) }} as totalsell,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['cashbackAt'], ['cashbackAt']) }} as cashbackat,
    {{ json_extract_array('_airbyte_data', ['promotions'], ['promotions']) }} as promotions,
    {{ json_extract_scalar('_airbyte_data', ['totalPrice'], ['totalPrice']) }} as totalprice,
    {{ json_extract_scalar('_airbyte_data', ['deliveredAt'], ['deliveredAt']) }} as deliveredat,
    {{ json_extract_scalar('_airbyte_data', ['teamMemberId'], ['teamMemberId']) }} as teammemberid,
    {{ json_extract_scalar('_airbyte_data', ['isAssignCoupon'], ['isAssignCoupon']) }} as isassigncoupon,
    {{ json_extract_scalar('_airbyte_data', ['totalPromotion'], ['totalPromotion']) }} as totalpromotion,
    {{ json_extract_scalar('_airbyte_data', ['inWholesaleRange'], ['inWholesaleRange']) }} as inwholesalerange,
    {{ json_extract_scalar('_airbyte_data', ['voucherCashTotal'], ['voucherCashTotal']) }} as vouchercashtotal,
    {{ json_extract_scalar('_airbyte_data', ['wholesaleRangeId'], ['wholesaleRangeId']) }} as wholesalerangeid,
    {{ json_extract_scalar('_airbyte_data', ['totalWholesaleBonus'], ['totalWholesaleBonus']) }} as totalwholesalebonus,
    {{ json_extract_scalar('_airbyte_data', ['totalSellyWholesaleBonus'], ['totalSellyWholesaleBonus']) }} as totalsellywholesalebonus,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_order_items') }} as raw_stream
-- order_items
where 1 = 1
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Casting step for the order_items stream: coerce each extracted column to
-- the SQL type implied by the JSON schema. Numeric measures become floats,
-- flags become booleans, the promotions array passes through unchanged.
-- depends_on: {{ ref('order_items_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(sku as {{ dbt_utils.type_string() }}) as sku,
    cast({{ adapter.quote('date') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('date') }},
    cast({{ adapter.quote('user') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('user') }},
    cast(brand as {{ dbt_utils.type_string() }}) as brand,
    cast({{ adapter.quote('order') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('order') }},
    cast(price as {{ dbt_utils.type_string() }}) as price,
    cast(status as {{ dbt_utils.type_string() }}) as status,
    cast(teamid as {{ dbt_utils.type_string() }}) as teamid,
    cast(product as {{ dbt_utils.type_string() }}) as product,
    cast(voucher as {{ dbt_utils.type_string() }}) as voucher,
    cast(customer as {{ dbt_utils.type_string() }}) as customer,
    cast(quantity as {{ dbt_utils.type_float() }}) as quantity,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(inventory as {{ dbt_utils.type_string() }}) as inventory,
    cast(totalsell as {{ dbt_utils.type_float() }}) as totalsell,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(cashbackat as {{ dbt_utils.type_string() }}) as cashbackat,
    promotions,
    cast(totalprice as {{ dbt_utils.type_float() }}) as totalprice,
    cast(deliveredat as {{ dbt_utils.type_string() }}) as deliveredat,
    cast(teammemberid as {{ dbt_utils.type_string() }}) as teammemberid,
    {{ cast_to_boolean('isassigncoupon') }} as isassigncoupon,
    cast(totalpromotion as {{ dbt_utils.type_float() }}) as totalpromotion,
    {{ cast_to_boolean('inwholesalerange') }} as inwholesalerange,
    cast(vouchercashtotal as {{ dbt_utils.type_float() }}) as vouchercashtotal,
    cast(wholesalerangeid as {{ dbt_utils.type_string() }}) as wholesalerangeid,
    cast(totalwholesalebonus as {{ dbt_utils.type_float() }}) as totalwholesalebonus,
    cast(totalsellywholesalebonus as {{ dbt_utils.type_float() }}) as totalsellywholesalebonus,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('order_items_ab1') }}
-- order_items
where 1 = 1
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Hashing step for the order_items stream: derive a deterministic surrogate
-- key over every business column. Arrays and booleans are stringified first
-- so the hash input is stable across adapters.
-- depends_on: {{ ref('order_items_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'sku',
        adapter.quote('date'),
        adapter.quote('user'),
        'brand',
        adapter.quote('order'),
        'price',
        'status',
        'teamid',
        'product',
        'voucher',
        'customer',
        'quantity',
        'createdat',
        'inventory',
        'totalsell',
        'updatedat',
        'cashbackat',
        array_to_string('promotions'),
        'totalprice',
        'deliveredat',
        'teammemberid',
        boolean_to_string('isassigncoupon'),
        'totalpromotion',
        boolean_to_string('inwholesalerange'),
        'vouchercashtotal',
        'wholesalerangeid',
        'totalwholesalebonus',
        'totalsellywholesalebonus',
    ]) }} as _airbyte_order_items_hashid,
    casted.*
from {{ ref('order_items_ab2') }} casted
-- order_items
where 1 = 1
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Extraction step for the orders stream: pull each field declared by the JSON
-- schema out of the raw Airbyte blob (_airbyte_data) into its own column.
-- Convention used throughout this file: the json_extract_scalar path argument
-- and the normalized-names argument mirror the source key's camelCase, and
-- the SQL alias is the lowercased name. The trackingCode line below was
-- fixed to follow that convention (it previously read
-- `['trackingCode'], ['trackingcode']) }} as trackingCode`); the downstream
-- orders_ab2 model selects `trackingcode`.
-- depends_on: {{ source('unibag', '_airbyte_raw_orders') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['code'], ['code']) }} as code,
    {{ json_extract_scalar('_airbyte_data', ['date'], ['date']) }} as {{ adapter.quote('date') }},
    {{ json_extract_scalar('_airbyte_data', ['hour'], ['hour']) }} as {{ adapter.quote('hour') }},
    {{ json_extract_scalar('_airbyte_data', ['note'], ['note']) }} as note,
    {{ json_extract_array('_airbyte_data', ['tags'], ['tags']) }} as tags,
    {{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
    {{ json_extract_scalar('_airbyte_data', ['price'], ['price']) }} as price,
    {{ json_extract_scalar('_airbyte_data', ['banned'], ['banned']) }} as banned,
    {{ json_extract_scalar('_airbyte_data', ['reason'], ['reason']) }} as reason,
    {{ json_extract_scalar('_airbyte_data', ['source'], ['source']) }} as {{ adapter.quote('source') }},
    {{ json_extract_scalar('_airbyte_data', ['status'], ['status']) }} as status,
    {{ json_extract_scalar('_airbyte_data', ['teamId'], ['teamId']) }} as teamid,
    {{ json_extract_scalar('_airbyte_data', ['payment'], ['payment']) }} as payment,
    {{ json_extract_scalar('_airbyte_data', ['remarks'], ['remarks']) }} as remarks,
    {{ json_extract_scalar('_airbyte_data', ['customer'], ['customer']) }} as customer,
    {{ json_extract_scalar('_airbyte_data', ['delivery'], ['delivery']) }} as delivery,
    {{ json_extract_scalar('_airbyte_data', ['isCalled'], ['isCalled']) }} as iscalled,
    {{ json_extract_scalar('_airbyte_data', ['merchant'], ['merchant']) }} as merchant,
    {{ json_extract_scalar('_airbyte_data', ['pickupAt'], ['pickupAt']) }} as pickupat,
    {{ json_extract_scalar('_airbyte_data', ['supplier'], ['supplier']) }} as supplier,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['inventory'], ['inventory']) }} as inventory,
    {{ json_extract_scalar('_airbyte_data', ['invoiceId'], ['invoiceId']) }} as invoiceid,
    {{ json_extract_scalar('_airbyte_data', ['isDeleted'], ['isDeleted']) }} as isdeleted,
    {{ json_extract_scalar('_airbyte_data', ['promotion'], ['promotion']) }} as promotion,
    {{ json_extract_scalar('_airbyte_data', ['requestId'], ['requestId']) }} as requestid,
    {{ json_extract_scalar('_airbyte_data', ['restockAt'], ['restockAt']) }} as restockat,
    {{ json_extract_scalar('_airbyte_data', ['sendEmail'], ['sendEmail']) }} as sendemail,
    {{ json_extract_scalar('_airbyte_data', ['totalItem'], ['totalItem']) }} as totalitem,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['userAgent'], ['userAgent']) }} as useragent,
    {{ json_extract_scalar('_airbyte_data', ['wholesale'], ['wholesale']) }} as wholesale,
    {{ json_extract_scalar('_airbyte_data', ['approvedAt'], ['approvedAt']) }} as approvedat,
    {{ json_extract_scalar('_airbyte_data', ['cashbackAt'], ['cashbackAt']) }} as cashbackat,
    {{ json_extract_scalar('_airbyte_data', ['isPreorder'], ['isPreorder']) }} as ispreorder,
    {{ json_extract_scalar('_airbyte_data', ['isReviewed'], ['isReviewed']) }} as isreviewed,
    {{ json_extract_scalar('_airbyte_data', ['rejectedAt'], ['rejectedAt']) }} as rejectedat,
    {{ json_extract_scalar('_airbyte_data', ['deliveredAt'], ['deliveredAt']) }} as deliveredat,
    {{ json_extract_scalar('_airbyte_data', ['deliveringAt'], ['deliveringAt']) }} as deliveringat,
    {{ json_extract_scalar('_airbyte_data', ['deliveryCode'], ['deliveryCode']) }} as deliverycode,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    {{ json_extract_scalar('_airbyte_data', ['staffApprove'], ['staffApprove']) }} as staffapprove,
    {{ json_extract_scalar('_airbyte_data', ['teamMemberId'], ['teamMemberId']) }} as teammemberid,
    {{ json_extract_scalar('_airbyte_data', ['processStatus'], ['processStatus']) }} as processstatus,
    {{ json_extract_scalar('_airbyte_data', ['hookTimeLastAt'], ['hookTimeLastAt']) }} as hooktimelastat,
    {{ json_extract_scalar('_airbyte_data', ['isAssignCoupon'], ['isAssignCoupon']) }} as isassigncoupon,
    {{ json_extract_scalar('_airbyte_data', ['isAutoApproved'], ['isAutoApproved']) }} as isautoapproved,
    {{ json_extract_scalar('_airbyte_data', ['outboundRequest'], ['outboundRequest']) }} as outboundrequest,
    {{ json_extract_scalar('_airbyte_data', ['trackingCodeURL'], ['trackingCodeURL']) }} as trackingcodeurl,
    {{ json_extract_scalar('_airbyte_data', ['trackingCode'], ['trackingCode']) }} as trackingcode,
    {{ json_extract_scalar('_airbyte_data', ['waitingCancelBy'], ['waitingCancelBy']) }} as waitingcancelby,
    {{ json_extract_scalar('_airbyte_data', ['isChangeDelivery'], ['isChangeDelivery']) }} as ischangedelivery,
    {{ json_extract_scalar('_airbyte_data', ['estimateCashbackAt'], ['estimateCashbackAt']) }} as estimatecashbackat,
    {{ json_extract_scalar('_airbyte_data', ['fromNewActiveBuyer'], ['fromNewActiveBuyer']) }} as fromnewactivebuyer,
    {{ json_extract_scalar('_airbyte_data', ['isWaitingCancelled'], ['isWaitingCancelled']) }} as iswaitingcancelled,
    {{ json_extract_scalar('_airbyte_data', ['fromNewActiveSeller'], ['fromNewActiveSeller']) }} as fromnewactiveseller,
    {{ json_extract_scalar('_airbyte_data', ['waitingCancelReason'], ['waitingCancelReason']) }} as waitingcancelreason,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_orders') }} as table_alias
-- orders
where 1 = 1
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Casting step for the orders stream: coerce each extracted column to the SQL
-- type implied by the JSON schema. Flags become booleans, "hour" and
-- "totalitem" become floats, the tags array passes through unchanged.
-- depends_on: {{ ref('orders_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(code as {{ dbt_utils.type_string() }}) as code,
    cast({{ adapter.quote('date') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('date') }},
    cast({{ adapter.quote('hour') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('hour') }},
    cast(note as {{ dbt_utils.type_string() }}) as note,
    tags,
    cast({{ adapter.quote('user') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('user') }},
    cast(price as {{ dbt_utils.type_string() }}) as price,
    {{ cast_to_boolean('banned') }} as banned,
    cast(reason as {{ dbt_utils.type_string() }}) as reason,
    cast({{ adapter.quote('source') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('source') }},
    cast(status as {{ dbt_utils.type_string() }}) as status,
    cast(teamid as {{ dbt_utils.type_string() }}) as teamid,
    cast(payment as {{ dbt_utils.type_string() }}) as payment,
    cast(remarks as {{ dbt_utils.type_string() }}) as remarks,
    cast(customer as {{ dbt_utils.type_string() }}) as customer,
    cast(delivery as {{ dbt_utils.type_string() }}) as delivery,
    {{ cast_to_boolean('iscalled') }} as iscalled,
    cast(merchant as {{ dbt_utils.type_string() }}) as merchant,
    cast(pickupat as {{ dbt_utils.type_string() }}) as pickupat,
    cast(supplier as {{ dbt_utils.type_string() }}) as supplier,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(inventory as {{ dbt_utils.type_string() }}) as inventory,
    cast(invoiceid as {{ dbt_utils.type_string() }}) as invoiceid,
    {{ cast_to_boolean('isdeleted') }} as isdeleted,
    cast(promotion as {{ dbt_utils.type_string() }}) as promotion,
    cast(requestid as {{ dbt_utils.type_string() }}) as requestid,
    cast(restockat as {{ dbt_utils.type_string() }}) as restockat,
    cast(sendemail as {{ dbt_utils.type_string() }}) as sendemail,
    cast(totalitem as {{ dbt_utils.type_float() }}) as totalitem,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(useragent as {{ dbt_utils.type_string() }}) as useragent,
    {{ cast_to_boolean('wholesale') }} as wholesale,
    cast(approvedat as {{ dbt_utils.type_string() }}) as approvedat,
    cast(cashbackat as {{ dbt_utils.type_string() }}) as cashbackat,
    {{ cast_to_boolean('ispreorder') }} as ispreorder,
    {{ cast_to_boolean('isreviewed') }} as isreviewed,
    cast(rejectedat as {{ dbt_utils.type_string() }}) as rejectedat,
    cast(deliveredat as {{ dbt_utils.type_string() }}) as deliveredat,
    cast(deliveringat as {{ dbt_utils.type_string() }}) as deliveringat,
    cast(deliverycode as {{ dbt_utils.type_string() }}) as deliverycode,
    cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
    cast(staffapprove as {{ dbt_utils.type_string() }}) as staffapprove,
    cast(teammemberid as {{ dbt_utils.type_string() }}) as teammemberid,
    cast(processstatus as {{ dbt_utils.type_string() }}) as processstatus,
    cast(hooktimelastat as {{ dbt_utils.type_string() }}) as hooktimelastat,
    {{ cast_to_boolean('isassigncoupon') }} as isassigncoupon,
    {{ cast_to_boolean('isautoapproved') }} as isautoapproved,
    cast(outboundrequest as {{ dbt_utils.type_string() }}) as outboundrequest,
    cast(trackingcodeurl as {{ dbt_utils.type_string() }}) as trackingcodeurl,
    cast(trackingcode as {{ dbt_utils.type_string() }}) as trackingcode,
    cast(waitingcancelby as {{ dbt_utils.type_string() }}) as waitingcancelby,
    {{ cast_to_boolean('ischangedelivery') }} as ischangedelivery,
    cast(estimatecashbackat as {{ dbt_utils.type_string() }}) as estimatecashbackat,
    {{ cast_to_boolean('fromnewactivebuyer') }} as fromnewactivebuyer,
    {{ cast_to_boolean('iswaitingcancelled') }} as iswaitingcancelled,
    {{ cast_to_boolean('fromnewactiveseller') }} as fromnewactiveseller,
    cast(waitingcancelreason as {{ dbt_utils.type_string() }}) as waitingcancelreason,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('orders_ab1') }}
-- orders
where 1 = 1
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Hashing step for the orders stream: derive a deterministic surrogate key
-- over every business column. Arrays and booleans are stringified first so
-- the hash input is stable across adapters.
-- depends_on: {{ ref('orders_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'code',
        adapter.quote('date'),
        adapter.quote('hour'),
        'note',
        array_to_string('tags'),
        adapter.quote('user'),
        'price',
        boolean_to_string('banned'),
        'reason',
        adapter.quote('source'),
        'status',
        'teamid',
        'payment',
        'remarks',
        'customer',
        'delivery',
        boolean_to_string('iscalled'),
        'merchant',
        'pickupat',
        'supplier',
        'createdat',
        'inventory',
        'invoiceid',
        boolean_to_string('isdeleted'),
        'promotion',
        'requestid',
        'restockat',
        'sendemail',
        'totalitem',
        'updatedat',
        'useragent',
        boolean_to_string('wholesale'),
        'approvedat',
        'cashbackat',
        boolean_to_string('ispreorder'),
        boolean_to_string('isreviewed'),
        'rejectedat',
        'deliveredat',
        'deliveringat',
        'deliverycode',
        'searchstring',
        'staffapprove',
        'teammemberid',
        'processstatus',
        'hooktimelastat',
        boolean_to_string('isassigncoupon'),
        boolean_to_string('isautoapproved'),
        'outboundrequest',
        'trackingcodeurl',
        'trackingcode',
        'waitingcancelby',
        boolean_to_string('ischangedelivery'),
        'estimatecashbackat',
        boolean_to_string('fromnewactivebuyer'),
        boolean_to_string('iswaitingcancelled'),
        boolean_to_string('fromnewactiveseller'),
        'waitingcancelreason',
    ]) }} as _airbyte_orders_hashid,
    casted.*
from {{ ref('orders_ab2') }} casted
-- orders
where 1 = 1
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Extraction step for the product_categories stream: pull each field declared
-- by the JSON schema out of the raw Airbyte blob (_airbyte_data) into its own
-- column. "covers" is an array; reserved words are quoted via the adapter.
-- depends_on: {{ source('unibag', '_airbyte_raw_product_categories') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['icon'], ['icon']) }} as icon,
    {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
    {{ json_extract_scalar('_airbyte_data', ['color'], ['color']) }} as color,
    {{ json_extract_scalar('_airbyte_data', ['order'], ['order']) }} as {{ adapter.quote('order') }},
    {{ json_extract_scalar('_airbyte_data', ['active'], ['active']) }} as active,
    {{ json_extract_array('_airbyte_data', ['covers'], ['covers']) }} as covers,
    {{ json_extract_scalar('_airbyte_data', ['featured'], ['featured']) }} as featured,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['totalProduct'], ['totalProduct']) }} as totalproduct,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_product_categories') }} as raw_stream
-- product_categories
where 1 = 1
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Casting step for the product_categories stream: coerce each extracted
-- column to the SQL type implied by the JSON schema ("order" and
-- "totalproduct" are numeric, "active"/"featured" are booleans, the covers
-- array passes through unchanged).
-- depends_on: {{ ref('product_categories_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(icon as {{ dbt_utils.type_string() }}) as icon,
    cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
    cast(color as {{ dbt_utils.type_string() }}) as color,
    cast({{ adapter.quote('order') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('order') }},
    {{ cast_to_boolean('active') }} as active,
    covers,
    {{ cast_to_boolean('featured') }} as featured,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(totalproduct as {{ dbt_utils.type_float() }}) as totalproduct,
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('product_categories_ab1') }}
-- product_categories
where 1 = 1
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
{{ config(
    schema = "_airbyte_unibag",
    unique_key = '_airbyte_ab_id',
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    tags = [ "top-level-intermediate" ]
) }}
-- Hashing step for the product_categories stream: derive a deterministic
-- surrogate key over every business column (arrays and booleans are
-- stringified first so the hash input is stable across adapters).
-- depends_on: {{ ref('product_categories_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'icon',
        adapter.quote('name'),
        'color',
        adapter.quote('order'),
        boolean_to_string('active'),
        array_to_string('covers'),
        boolean_to_string('featured'),
        'createdat',
        'updatedat',
        'totalproduct',
    ]) }} as _airbyte_product_categories_hashid,
    casted.*
from {{ ref('product_categories_ab2') }} casted
-- product_categories
where 1 = 1
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_product_sku') }}
-- Airbyte normalization stage 1 (_ab1): pulls each field out of the raw
-- _airbyte_data JSON blob. json_extract_scalar()/json_extract_array() are
-- project macros; camelCase JSON keys are aliased to lower-case column names,
-- and reserved words (name, type, source, version) are quoted via adapter.quote().
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['sku'], ['sku']) }} as sku,
    {{ json_extract_scalar('_airbyte_data', ['info'], ['info']) }} as info,
    {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
    {{ json_extract_scalar('_airbyte_data', ['type'], ['type']) }} as {{ adapter.quote('type') }},
    {{ json_extract_scalar('_airbyte_data', ['brand'], ['brand']) }} as brand,
    {{ json_extract_scalar('_airbyte_data', ['cover'], ['cover']) }} as cover,
    {{ json_extract_scalar('_airbyte_data', ['price'], ['price']) }} as price,
    {{ json_extract_scalar('_airbyte_data', ['active'], ['active']) }} as active,
    {{ json_extract_scalar('_airbyte_data', ['author'], ['author']) }} as author,
    {{ json_extract_scalar('_airbyte_data', ['source'], ['source']) }} as {{ adapter.quote('source') }},
    {{ json_extract_scalar('_airbyte_data', ['display'], ['display']) }} as display,
    {{ json_extract_scalar('_airbyte_data', ['picture'], ['picture']) }} as picture,
    {{ json_extract_scalar('_airbyte_data', ['product'], ['product']) }} as product,
    {{ json_extract_scalar('_airbyte_data', ['version'], ['version']) }} as {{ adapter.quote('version') }},
    {{ json_extract_scalar('_airbyte_data', ['groupSku'], ['groupSku']) }} as groupsku,
    {{ json_extract_scalar('_airbyte_data', ['unitCode'], ['unitCode']) }} as unitcode,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['restockAt'], ['restockAt']) }} as restockat,
    {{ json_extract_scalar('_airbyte_data', ['statistic'], ['statistic']) }} as statistic,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['couponInfo'], ['couponInfo']) }} as couponinfo,
    {{ json_extract_array('_airbyte_data', ['properties'], ['properties']) }} as properties,
    {{ json_extract_scalar('_airbyte_data', ['canPreorder'], ['canPreorder']) }} as canpreorder,
    {{ json_extract_scalar('_airbyte_data', ['supplierSku'], ['supplierSku']) }} as suppliersku,
    {{ json_extract_scalar('_airbyte_data', ['isOutOfStock'], ['isOutOfStock']) }} as isoutofstock,
    {{ json_extract_scalar('_airbyte_data', ['pricePercent'], ['pricePercent']) }} as pricepercent,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    {{ json_extract_scalar('_airbyte_data', ['quantity'], ['quantity']) }} as quantity,
    {{ json_extract_scalar('_airbyte_data', ['displayInventory'], ['displayInventory']) }} as displayinventory,
    {{ json_extract_scalar('_airbyte_data', ['showRemainingQuantity'], ['showRemainingQuantity']) }} as showremainingquantity,
    {{ json_extract_scalar('_airbyte_data', ['quantity_aibyte_transform'], ['quantity_aibyte_transform']) }} as quantity_aibyte_transform,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_product_sku') }} as table_alias
-- product_sku
where 1 = 1
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
-- depends_on: {{ ref('product_sku_ab1') }}
-- Airbyte normalization stage 2 (_ab2): casts the stage-1 text columns to
-- their JSON-schema types. cast_to_boolean() is a project macro (not visible
-- here) -- presumably handles 'true'/'false' text values; confirm in macros/.
-- JSON-object fields (info, statistic, couponinfo, ...) stay serialized as strings.
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(sku as {{ dbt_utils.type_string() }}) as sku,
    cast(info as {{ dbt_utils.type_string() }}) as info,
    cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
    cast({{ adapter.quote('type') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('type') }},
    cast(brand as {{ dbt_utils.type_string() }}) as brand,
    cast(cover as {{ dbt_utils.type_string() }}) as cover,
    cast(price as {{ dbt_utils.type_string() }}) as price,
    {{ cast_to_boolean('active') }} as active,
    cast(author as {{ dbt_utils.type_string() }}) as author,
    cast({{ adapter.quote('source') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('source') }},
    {{ cast_to_boolean('display') }} as display,
    cast(picture as {{ dbt_utils.type_string() }}) as picture,
    cast(product as {{ dbt_utils.type_string() }}) as product,
    cast({{ adapter.quote('version') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('version') }},
    cast(groupsku as {{ dbt_utils.type_string() }}) as groupsku,
    cast(unitcode as {{ dbt_utils.type_string() }}) as unitcode,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(restockat as {{ dbt_utils.type_string() }}) as restockat,
    cast(statistic as {{ dbt_utils.type_string() }}) as statistic,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(couponinfo as {{ dbt_utils.type_string() }}) as couponinfo,
    -- array column: passed through uncast.
    properties,
    {{ cast_to_boolean('canpreorder') }} as canpreorder,
    cast(suppliersku as {{ dbt_utils.type_string() }}) as suppliersku,
    {{ cast_to_boolean('isoutofstock') }} as isoutofstock,
    cast(pricepercent as {{ dbt_utils.type_string() }}) as pricepercent,
    cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
    {{ cast_to_boolean('displayinventory') }} as displayinventory,
    {{ cast_to_boolean('showremainingquantity') }} as showremainingquantity,
    cast(quantity as {{ dbt_utils.type_string() }}) as quantity,
    cast(quantity_aibyte_transform as {{ dbt_utils.type_string() }}) as quantity_aibyte_transform,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('product_sku_ab1') }}
-- product_sku
where 1 = 1
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to build a hash column based on the values of this record
-- depends_on: {{ ref('product_sku_ab2') }}
-- Airbyte normalization stage 3 (_ab3): adds a deterministic surrogate hash
-- over every business column, used downstream for dedup/incremental logic.
-- boolean_to_string()/array_to_string() are project macros (not visible here)
-- that presumably normalize non-text columns before hashing -- verify in macros/.
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'sku',
        'info',
        adapter.quote('name'),
        adapter.quote('type'),
        'brand',
        'cover',
        'price',
        boolean_to_string('active'),
        'author',
        adapter.quote('source'),
        boolean_to_string('display'),
        'picture',
        'product',
        adapter.quote('version'),
        'groupsku',
        'unitcode',
        'createdat',
        'restockat',
        'statistic',
        'updatedat',
        'couponinfo',
        array_to_string('properties'),
        boolean_to_string('canpreorder'),
        'suppliersku',
        boolean_to_string('isoutofstock'),
        'pricepercent',
        'searchstring',
        'quantity',
        boolean_to_string('displayinventory'),
        boolean_to_string('showremainingquantity'),
        'quantity_aibyte_transform',
    ]) }} as _airbyte_product_sku_hashid,
    tmp.*
from {{ ref('product_sku_ab2') }} tmp
-- product_sku
where 1 = 1
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_product_sku_versions') }}
-- Airbyte normalization stage 1 (_ab1): pulls each field out of the raw
-- _airbyte_data JSON blob; camelCase keys are aliased to lower-case columns,
-- reserved words (name, source, version) are quoted via adapter.quote().
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['sku'], ['sku']) }} as sku,
    {{ json_extract_scalar('_airbyte_data', ['info'], ['info']) }} as info,
    {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
    {{ json_extract_scalar('_airbyte_data', ['price'], ['price']) }} as price,
    {{ json_extract_scalar('_airbyte_data', ['source'], ['source']) }} as {{ adapter.quote('source') }},
    {{ json_extract_scalar('_airbyte_data', ['codeSku'], ['codeSku']) }} as codesku,
    {{ json_extract_scalar('_airbyte_data', ['picture'], ['picture']) }} as picture,
    {{ json_extract_scalar('_airbyte_data', ['product'], ['product']) }} as product,
    {{ json_extract_scalar('_airbyte_data', ['statistic'], ['statistic']) }} as statistic,
    {{ json_extract_scalar('_airbyte_data', ['version'], ['version']) }} as {{ adapter.quote('version') }},
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_array('_airbyte_data', ['properties'], ['properties']) }} as properties,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_product_sku_versions') }} as table_alias
-- product_sku_versions
where 1 = 1
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
-- depends_on: {{ ref('product_sku_versions_ab1') }}
-- Airbyte normalization stage 2 (_ab2): casts the stage-1 text columns to
-- their JSON-schema types; only `version` is numeric here, everything else
-- (including serialized JSON objects like info/statistic) stays a string.
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(sku as {{ dbt_utils.type_string() }}) as sku,
    cast(info as {{ dbt_utils.type_string() }}) as info,
    cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
    cast(price as {{ dbt_utils.type_string() }}) as price,
    cast({{ adapter.quote('source') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('source') }},
    cast(codesku as {{ dbt_utils.type_string() }}) as codesku,
    cast(picture as {{ dbt_utils.type_string() }}) as picture,
    cast(product as {{ dbt_utils.type_string() }}) as product,
    cast({{ adapter.quote('version') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('version') }},
    cast(statistic as {{ dbt_utils.type_string() }}) as statistic,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    -- array column: passed through uncast.
    properties,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('product_sku_versions_ab1') }}
-- product_sku_versions
where 1 = 1
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to build a hash column based on the values of this record
-- depends_on: {{ ref('product_sku_versions_ab2') }}
-- Airbyte normalization stage 3 (_ab3): adds a deterministic surrogate hash
-- over every business column, used downstream for dedup/incremental logic.
-- array_to_string() is a project macro (not visible here) that presumably
-- serializes the array column before hashing -- verify in macros/.
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'sku',
        'info',
        adapter.quote('name'),
        'price',
        adapter.quote('source'),
        'codesku',
        'picture',
        'product',
        'statistic',
        adapter.quote('version'),
        'createdat',
        'updatedat',
        array_to_string('properties'),
    ]) }} as _airbyte_product_sku_versions_hashid,
    tmp.*
from {{ ref('product_sku_versions_ab2') }} tmp
-- product_sku_versions
where 1 = 1
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_product_sub_categories') }}
-- Airbyte normalization stage 1 (_ab1): pulls each field out of the raw
-- _airbyte_data JSON blob; camelCase keys are aliased to lower-case columns,
-- reserved words (name, order) are quoted via adapter.quote().
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
    {{ json_extract_scalar('_airbyte_data', ['order'], ['order']) }} as {{ adapter.quote('order') }},
    {{ json_extract_scalar('_airbyte_data', ['active'], ['active']) }} as active,
    {{ json_extract_scalar('_airbyte_data', ['parent'], ['parent']) }} as parent,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    {{ json_extract_scalar('_airbyte_data', ['totalProduct'], ['totalProduct']) }} as totalproduct,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_product_sub_categories') }} as table_alias
-- product_sub_categories
where 1 = 1
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
-- depends_on: {{ ref('product_sub_categories_ab1') }}
-- Airbyte normalization stage 2 (_ab2): casts the stage-1 text columns to
-- their JSON-schema types (order/totalproduct -> float, active -> boolean).
-- cast_to_boolean() is a project macro -- confirm its semantics in macros/.
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
    cast({{ adapter.quote('order') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('order') }},
    {{ cast_to_boolean('active') }} as active,
    cast(parent as {{ dbt_utils.type_string() }}) as parent,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
    cast(totalproduct as {{ dbt_utils.type_float() }}) as totalproduct,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('product_sub_categories_ab1') }}
-- product_sub_categories
where 1 = 1
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to build a hash column based on the values of this record
-- depends_on: {{ ref('product_sub_categories_ab2') }}
-- Airbyte normalization stage 3 (_ab3): adds a deterministic surrogate hash
-- over every business column, used downstream for dedup/incremental logic.
-- boolean_to_string() is a project macro (not visible here) that presumably
-- stringifies the boolean before hashing -- verify in macros/.
select
    {{ dbt_utils.surrogate_key([
        '_id',
        adapter.quote('name'),
        adapter.quote('order'),
        boolean_to_string('active'),
        'parent',
        'createdat',
        'updatedat',
        'searchstring',
        'totalproduct',
    ]) }} as _airbyte_product_sub_categories_hashid,
    tmp.*
from {{ ref('product_sub_categories_ab2') }} tmp
-- product_sub_categories
where 1 = 1
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_products') }}
-- Airbyte normalization stage 1 (_ab1): pulls each field out of the raw
-- _airbyte_data JSON blob as text (scalars) or JSON arrays; camelCase keys
-- are aliased to lower-case columns and reserved words (desc, name, temp,
-- order) are quoted via adapter.quote().
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['desc'], ['desc']) }} as {{ adapter.quote('desc') }},
    {{ json_extract_scalar('_airbyte_data', ['info'], ['info']) }} as info,
    {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
    {{ json_extract_scalar('_airbyte_data', ['temp'], ['temp']) }} as {{ adapter.quote('temp') }},
    {{ json_extract_scalar('_airbyte_data', ['brand'], ['brand']) }} as brand,
    {{ json_extract_scalar('_airbyte_data', ['order'], ['order']) }} as {{ adapter.quote('order') }},
    {{ json_extract_scalar('_airbyte_data', ['price'], ['price']) }} as price,
    {{ json_extract_scalar('_airbyte_data', ['score'], ['score']) }} as score,
    {{ json_extract_scalar('_airbyte_data', ['active'], ['active']) }} as active,
    {{ json_extract_scalar('_airbyte_data', ['author'], ['author']) }} as author,
    {{ json_extract_array('_airbyte_data', ['guides'], ['guides']) }} as guides,
    {{ json_extract_array('_airbyte_data', ['photos'], ['photos']) }} as photos,
    {{ json_extract_array('_airbyte_data', ['videos'], ['videos']) }} as videos,
    {{ json_extract_scalar('_airbyte_data', ['quantity'], ['quantity']) }} as quantity,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_array('_airbyte_data', ['documents'], ['documents']) }} as documents,
    {{ json_extract_scalar('_airbyte_data', ['shareDesc'], ['shareDesc']) }} as sharedesc,
    {{ json_extract_scalar('_airbyte_data', ['statistic'], ['statistic']) }} as statistic,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_array('_airbyte_data', ['categories'], ['categories']) }} as categories,
    {{ json_extract_array('_airbyte_data', ['properties'], ['properties']) }} as properties,
    {{ json_extract_scalar('_airbyte_data', ['reActiveAt'], ['reActiveAt']) }} as reactiveat,
    {{ json_extract_array('_airbyte_data', ['inventories'], ['inventories']) }} as inventories,
    {{ json_extract_scalar('_airbyte_data', ['isOutOfStock'], ['isOutOfStock']) }} as isoutofstock,
    {{ json_extract_scalar('_airbyte_data', ['pricePercent'], ['pricePercent']) }} as pricepercent,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    {{ json_extract_array('_airbyte_data', ['subCategories'], ['subCategories']) }} as subcategories,
    {{ json_extract_scalar('_airbyte_data', ['inactiveReason'], ['inactiveReason']) }} as inactivereason,
    {{ json_extract_scalar('_airbyte_data', ['propertiesMain'], ['propertiesMain']) }} as propertiesmain,
    {{ json_extract_scalar('_airbyte_data', ['shareStatistic'], ['shareStatistic']) }} as sharestatistic,
    {{ json_extract_scalar('_airbyte_data', ['canIssueInvoice'], ['canIssueInvoice']) }} as canissueinvoice,
    {{ json_extract_scalar('_airbyte_data', ['pendingInactive'], ['pendingInactive']) }} as pendinginactive,
    {{ json_extract_scalar('_airbyte_data', ['reviewStatistic'], ['reviewStatistic']) }} as reviewstatistic,
    {{ json_extract_scalar('_airbyte_data', ['pendingInactiveAt'], ['pendingInactiveAt']) }} as pendinginactiveat,
    {{ json_extract_array('_airbyte_data', ['highlightProperties'], ['highlightProperties']) }} as highlightproperties,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_products') }} as table_alias
-- products
where 1 = 1
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
-- depends_on: {{ ref('products_ab1') }}
-- Airbyte normalization stage 2 (_ab2): casts the stage-1 text columns to
-- their JSON-schema types. Array columns are passed through uncast;
-- cast_to_boolean() is a project macro -- confirm its semantics in macros/.
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast({{ adapter.quote('desc') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('desc') }},
    cast(info as {{ dbt_utils.type_string() }}) as info,
    cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
    cast({{ adapter.quote('temp') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('temp') }},
    cast(brand as {{ dbt_utils.type_string() }}) as brand,
    cast({{ adapter.quote('order') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('order') }},
    cast(price as {{ dbt_utils.type_string() }}) as price,
    cast(score as {{ dbt_utils.type_string() }}) as score,
    {{ cast_to_boolean('active') }} as active,
    cast(author as {{ dbt_utils.type_string() }}) as author,
    guides,
    photos,
    videos,
    cast(quantity as {{ dbt_utils.type_float() }}) as quantity,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    documents,
    cast(sharedesc as {{ dbt_utils.type_string() }}) as sharedesc,
    cast(statistic as {{ dbt_utils.type_string() }}) as statistic,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    categories,
    properties,
    cast(reactiveat as {{ dbt_utils.type_string() }}) as reactiveat,
    inventories,
    {{ cast_to_boolean('isoutofstock') }} as isoutofstock,
    cast(pricepercent as {{ dbt_utils.type_string() }}) as pricepercent,
    cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
    subcategories,
    cast(inactivereason as {{ dbt_utils.type_string() }}) as inactivereason,
    cast(propertiesmain as {{ dbt_utils.type_string() }}) as propertiesmain,
    cast(sharestatistic as {{ dbt_utils.type_string() }}) as sharestatistic,
    {{ cast_to_boolean('canissueinvoice') }} as canissueinvoice,
    {{ cast_to_boolean('pendinginactive') }} as pendinginactive,
    cast(reviewstatistic as {{ dbt_utils.type_string() }}) as reviewstatistic,
    cast(pendinginactiveat as {{ dbt_utils.type_string() }}) as pendinginactiveat,
    highlightproperties,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('products_ab1') }}
-- products
where 1 = 1
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to build a hash column based on the values of this record
-- depends_on: {{ ref('products_ab2') }}
-- Airbyte normalization stage 3 (_ab3): adds a deterministic surrogate hash
-- over every business column, used downstream for dedup/incremental logic.
-- boolean_to_string()/array_to_string() are project macros (not visible here)
-- that presumably normalize non-text columns before hashing -- verify in macros/.
select
    {{ dbt_utils.surrogate_key([
        '_id',
        adapter.quote('desc'),
        'info',
        adapter.quote('name'),
        adapter.quote('temp'),
        'brand',
        adapter.quote('order'),
        'price',
        'score',
        boolean_to_string('active'),
        'author',
        array_to_string('guides'),
        array_to_string('photos'),
        array_to_string('videos'),
        'quantity',
        'createdat',
        array_to_string('documents'),
        'sharedesc',
        'statistic',
        'updatedat',
        array_to_string('categories'),
        array_to_string('properties'),
        'reactiveat',
        array_to_string('inventories'),
        boolean_to_string('isoutofstock'),
        'pricepercent',
        'searchstring',
        array_to_string('subcategories'),
        'inactivereason',
        'propertiesmain',
        'sharestatistic',
        boolean_to_string('canissueinvoice'),
        boolean_to_string('pendinginactive'),
        'reviewstatistic',
        'pendinginactiveat',
        array_to_string('highlightproperties'),
    ]) }} as _airbyte_products_hashid,
    tmp.*
from {{ ref('products_ab2') }} tmp
-- products
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_promotion_orders') }}
-- Airbyte normalization stage 1 (_ab1): pulls each field out of the raw
-- _airbyte_data JSON blob; camelCase keys are aliased to lower-case columns,
-- reserved words (user, order) are quoted via adapter.quote().
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
    {{ json_extract_scalar('_airbyte_data', ['order'], ['order']) }} as {{ adapter.quote('order') }},
    {{ json_extract_scalar('_airbyte_data', ['title'], ['title']) }} as title,
    {{ json_extract_scalar('_airbyte_data', ['status'], ['status']) }} as status,
    {{ json_extract_scalar('_airbyte_data', ['supplier'], ['supplier']) }} as supplier,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['promotion'], ['promotion']) }} as promotion,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['cashbackAt'], ['cashbackAt']) }} as cashbackat,
    {{ json_extract_scalar('_airbyte_data', ['commission'], ['commission']) }} as commission,
    {{ json_extract_scalar('_airbyte_data', ['isRejected'], ['isRejected']) }} as isrejected,
    {{ json_extract_scalar('_airbyte_data', ['quantityPromotion'], ['quantityPromotion']) }} as quantitypromotion,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_promotion_orders') }} as table_alias
-- promotion_orders
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
-- depends_on: {{ ref('promotion_orders_ab1') }}
-- Airbyte normalization stage 2 (_ab2): casts the stage-1 text columns to
-- their JSON-schema types (commission/quantitypromotion -> float,
-- isrejected -> boolean via the project cast_to_boolean() macro).
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast({{ adapter.quote('user') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('user') }},
    cast({{ adapter.quote('order') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('order') }},
    cast(title as {{ dbt_utils.type_string() }}) as title,
    cast(status as {{ dbt_utils.type_string() }}) as status,
    cast(supplier as {{ dbt_utils.type_string() }}) as supplier,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(promotion as {{ dbt_utils.type_string() }}) as promotion,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(cashbackat as {{ dbt_utils.type_string() }}) as cashbackat,
    cast(commission as {{ dbt_utils.type_float() }}) as commission,
    {{ cast_to_boolean('isrejected') }} as isrejected,
    cast(quantitypromotion as {{ dbt_utils.type_float() }}) as quantitypromotion,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('promotion_orders_ab1') }}
-- promotion_orders
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to build a hash column based on the values of this record
-- depends_on: {{ ref('promotion_orders_ab2') }}
-- Airbyte normalization stage 3 (_ab3): adds a deterministic surrogate hash
-- over every business column, used downstream for dedup/incremental logic.
-- boolean_to_string() is a project macro (not visible here) that presumably
-- stringifies the boolean before hashing -- verify in macros/.
select
    {{ dbt_utils.surrogate_key([
        '_id',
        adapter.quote('user'),
        adapter.quote('order'),
        'title',
        'status',
        'supplier',
        'createdat',
        'promotion',
        'updatedat',
        'cashbackat',
        'commission',
        boolean_to_string('isrejected'),
        'quantitypromotion',
    ]) }} as _airbyte_promotion_orders_hashid,
    tmp.*
from {{ ref('promotion_orders_ab2') }} tmp
-- promotion_orders
where 1 = 1
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- SQL model to parse JSON blob stored in a single column and extract into separated field columns as described by the JSON Schema
-- depends_on: {{ source('unibag', '_airbyte_raw_promotions') }}
-- Airbyte normalization stage 1 (_ab1): pulls each field out of the raw
-- _airbyte_data JSON blob; camelCase keys are aliased to lower-case columns,
-- reserved words (type, options) are quoted via adapter.quote().
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['rest'], ['rest']) }} as rest,
    {{ json_extract_scalar('_airbyte_data', ['type'], ['type']) }} as {{ adapter.quote('type') }},
    {{ json_extract_scalar('_airbyte_data', ['endAt'], ['endAt']) }} as endat,
    {{ json_extract_scalar('_airbyte_data', ['title'], ['title']) }} as title,
    {{ json_extract_scalar('_airbyte_data', ['active'], ['active']) }} as active,
    {{ json_extract_scalar('_airbyte_data', ['options'], ['options']) }} as {{ adapter.quote('options') }},
    {{ json_extract_scalar('_airbyte_data', ['startAt'], ['startAt']) }} as startat,
    {{ json_extract_scalar('_airbyte_data', ['applyFor'], ['applyFor']) }} as applyfor,
    {{ json_extract_scalar('_airbyte_data', ['quantity'], ['quantity']) }} as quantity,
    {{ json_extract_scalar('_airbyte_data', ['articleID'], ['articleID']) }} as articleid,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    {{ json_extract_scalar('_airbyte_data', ['sharePercent'], ['sharePercent']) }} as sharepercent,
    {{ json_extract_scalar('_airbyte_data', ['conditionForUser'], ['conditionForUser']) }} as conditionforuser,
    {{ json_extract_scalar('_airbyte_data', ['isUnlimitedQuantity'], ['isUnlimitedQuantity']) }} as isunlimitedquantity,
    {{ json_extract_scalar('_airbyte_data', ['isApplyForOrderWholesale'], ['isApplyForOrderWholesale']) }} as isapplyfororderwholesale,
    -- Airbyte bookkeeping columns, carried through every stage.
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_promotions') }} as table_alias
-- promotions
where 1 = 1
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Casts every extracted promotions column to the SQL type implied by the stream's JSON schema.
-- depends_on: {{ ref('promotions_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(rest as {{ dbt_utils.type_float() }}) as rest,
    -- "type" is a reserved word, so it stays quoted via the adapter
    cast({{ adapter.quote('type') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('type') }},
    cast(endat as {{ dbt_utils.type_string() }}) as endat,
    cast(title as {{ dbt_utils.type_string() }}) as title,
    {{ cast_to_boolean('active') }} as active,
    cast({{ adapter.quote('options') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('options') }},
    cast(startat as {{ dbt_utils.type_string() }}) as startat,
    cast(applyfor as {{ dbt_utils.type_string() }}) as applyfor,
    cast(quantity as {{ dbt_utils.type_float() }}) as quantity,
    cast(articleid as {{ dbt_utils.type_string() }}) as articleid,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
    cast(sharepercent as {{ dbt_utils.type_string() }}) as sharepercent,
    cast(conditionforuser as {{ dbt_utils.type_string() }}) as conditionforuser,
    {{ cast_to_boolean('isunlimitedquantity') }} as isunlimitedquantity,
    {{ cast_to_boolean('isapplyfororderwholesale') }} as isapplyfororderwholesale,
    -- Airbyte bookkeeping columns are passed through unchanged
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('promotions_ab1') }}
-- promotions
where 1 = 1
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Adds a deterministic hash of all promotions columns, used downstream for change detection.
-- depends_on: {{ ref('promotions_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'rest',
        adapter.quote('type'),
        'endat',
        'title',
        boolean_to_string('active'),
        adapter.quote('options'),
        'startat',
        'applyfor',
        'quantity',
        'articleid',
        'createdat',
        'updatedat',
        'searchstring',
        'sharepercent',
        'conditionforuser',
        boolean_to_string('isunlimitedquantity'),
        boolean_to_string('isapplyfororderwholesale')
    ]) }} as _airbyte_promotions_hashid,
    tmp.*
from {{ ref('promotions_ab2') }} tmp
-- promotions
where 1 = 1
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Unpacks the raw referrals JSON blob (_airbyte_data) into one column per schema field.
-- depends_on: {{ source('unibag', '_airbyte_raw_referrals') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    -- "user" is a reserved word, so it stays quoted via the adapter
    {{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
    {{ json_extract_scalar('_airbyte_data', ['invitee'], ['invitee']) }} as invitee,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['fromSystem'], ['fromSystem']) }} as fromsystem,
    -- Airbyte bookkeeping columns are passed through unchanged
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_referrals') }} as table_alias
-- referrals
where 1 = 1
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Casts every extracted referrals column to the SQL type implied by the stream's JSON schema.
-- depends_on: {{ ref('referrals_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast({{ adapter.quote('user') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('user') }},
    cast(invitee as {{ dbt_utils.type_string() }}) as invitee,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    {{ cast_to_boolean('fromsystem') }} as fromsystem,
    -- Airbyte bookkeeping columns are passed through unchanged
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('referrals_ab1') }}
-- referrals
where 1 = 1
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Adds a deterministic hash of all referrals columns, used downstream for change detection.
-- depends_on: {{ ref('referrals_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        adapter.quote('user'),
        'invitee',
        'createdat',
        boolean_to_string('fromsystem')
    ]) }} as _airbyte_referrals_hashid,
    tmp.*
from {{ ref('referrals_ab2') }} tmp
-- referrals
where 1 = 1
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Unpacks the raw suppliers JSON blob (_airbyte_data) into one column per schema field.
-- depends_on: {{ source('unibag', '_airbyte_raw_suppliers') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['tax'], ['tax']) }} as tax,
    {{ json_extract_scalar('_airbyte_data', ['bank'], ['bank']) }} as bank,
    {{ json_extract_scalar('_airbyte_data', ['logo'], ['logo']) }} as logo,
    {{ json_extract_scalar('_airbyte_data', ['name'], ['name']) }} as {{ adapter.quote('name') }},
    {{ json_extract_scalar('_airbyte_data', ['active'], ['active']) }} as active,
    -- the source has both 'checkSum' and 'checksum'; case-insensitive folding
    -- would collide, so the second one is suffixed with _1
    {{ json_extract_scalar('_airbyte_data', ['checkSum'], ['checkSum']) }} as checksum,
    {{ json_extract_scalar('_airbyte_data', ['checksum'], ['checksum']) }} as checksum_1,
    {{ json_extract_scalar('_airbyte_data', ['location'], ['location']) }} as {{ adapter.quote('location') }},
    {{ json_extract_array('_airbyte_data', ['contracts'], ['contracts']) }} as contracts,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_array('_airbyte_data', ['documents'], ['documents']) }} as documents,
    {{ json_extract_scalar('_airbyte_data', ['statistic'], ['statistic']) }} as statistic,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_array('_airbyte_data', ['categories'], ['categories']) }} as categories,
    {{ json_extract_array('_airbyte_data', ['inventories'], ['inventories']) }} as inventories,
    {{ json_extract_scalar('_airbyte_data', ['identifyCode'], ['identifyCode']) }} as identifycode,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    {{ json_extract_scalar('_airbyte_data', ['contractNumber'], ['contractNumber']) }} as contractnumber,
    {{ json_extract_scalar('_airbyte_data', ['reviewStatistic'], ['reviewStatistic']) }} as reviewstatistic,
    {{ json_extract_scalar('_airbyte_data', ['contractSignedAt'], ['contractSignedAt']) }} as contractsignedat,
    {{ json_extract_array('_airbyte_data', ['emailForInvoices'], ['emailForInvoices']) }} as emailforinvoices,
    {{ json_extract_scalar('_airbyte_data', ['contractExpiredAt'], ['contractExpiredAt']) }} as contractexpiredat,
    {{ json_extract_scalar('_airbyte_data', ['sendInvoiceRequestEveryDay'], ['sendInvoiceRequestEveryDay']) }} as sendinvoicerequesteveryday,
    -- Airbyte bookkeeping columns are passed through unchanged
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_suppliers') }} as table_alias
-- suppliers
where 1 = 1
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Casts every extracted suppliers column to the SQL type implied by the stream's JSON schema.
-- depends_on: {{ ref('suppliers_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(tax as {{ dbt_utils.type_string() }}) as tax,
    cast(bank as {{ dbt_utils.type_string() }}) as bank,
    cast(logo as {{ dbt_utils.type_string() }}) as logo,
    cast({{ adapter.quote('name') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('name') }},
    {{ cast_to_boolean('active') }} as active,
    cast(checksum as {{ dbt_utils.type_string() }}) as checksum,
    cast(checksum_1 as {{ dbt_utils.type_string() }}) as checksum_1,
    cast({{ adapter.quote('location') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('location') }},
    -- array columns stay as-is; they are stringified only when hashed in ab3
    contracts,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    documents,
    cast(statistic as {{ dbt_utils.type_string() }}) as statistic,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    categories,
    inventories,
    cast(identifycode as {{ dbt_utils.type_float() }}) as identifycode,
    cast(searchstring as {{ dbt_utils.type_string() }}) as searchstring,
    cast(contractnumber as {{ dbt_utils.type_string() }}) as contractnumber,
    cast(reviewstatistic as {{ dbt_utils.type_string() }}) as reviewstatistic,
    cast(contractsignedat as {{ dbt_utils.type_string() }}) as contractsignedat,
    emailforinvoices,
    cast(contractexpiredat as {{ dbt_utils.type_string() }}) as contractexpiredat,
    {{ cast_to_boolean('sendinvoicerequesteveryday') }} as sendinvoicerequesteveryday,
    -- Airbyte bookkeeping columns are passed through unchanged
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('suppliers_ab1') }}
-- suppliers
where 1 = 1
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Adds a deterministic hash of all suppliers columns, used downstream for change detection.
-- Arrays and booleans are stringified first so they can participate in the hash.
-- depends_on: {{ ref('suppliers_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'tax',
        'bank',
        'logo',
        adapter.quote('name'),
        boolean_to_string('active'),
        'checksum',
        'checksum_1',
        adapter.quote('location'),
        array_to_string('contracts'),
        'createdat',
        array_to_string('documents'),
        'statistic',
        'updatedat',
        array_to_string('categories'),
        array_to_string('inventories'),
        'identifycode',
        'searchstring',
        'contractnumber',
        'reviewstatistic',
        'contractsignedat',
        array_to_string('emailforinvoices'),
        'contractexpiredat',
        boolean_to_string('sendinvoicerequesteveryday')
    ]) }} as _airbyte_suppliers_hashid,
    tmp.*
from {{ ref('suppliers_ab2') }} tmp
-- suppliers
where 1 = 1
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Unpacks the raw team_activities JSON blob (_airbyte_data) into one column per schema field.
-- depends_on: {{ source('unibag', '_airbyte_raw_team_activities') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['team'], ['team']) }} as team,
    -- "action" and "options" stay quoted via the adapter to avoid keyword clashes
    {{ json_extract_scalar('_airbyte_data', ['action'], ['action']) }} as {{ adapter.quote('action') }},
    {{ json_extract_scalar('_airbyte_data', ['options'], ['options']) }} as {{ adapter.quote('options') }},
    {{ json_extract_scalar('_airbyte_data', ['targetId'], ['targetId']) }} as targetid,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['createdBy'], ['createdBy']) }} as createdby,
    -- Airbyte bookkeeping columns are passed through unchanged
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_team_activities') }} as table_alias
-- team_activities
where 1 = 1
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Casts every extracted team_activities column to the SQL type implied by the stream's JSON schema.
-- depends_on: {{ ref('team_activities_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(team as {{ dbt_utils.type_string() }}) as team,
    cast({{ adapter.quote('action') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('action') }},
    cast({{ adapter.quote('options') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('options') }},
    cast(targetid as {{ dbt_utils.type_string() }}) as targetid,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(createdby as {{ dbt_utils.type_string() }}) as createdby,
    -- Airbyte bookkeeping columns are passed through unchanged
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('team_activities_ab1') }}
-- team_activities
where 1 = 1
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Adds a deterministic hash of all team_activities columns, used downstream for change detection.
-- depends_on: {{ ref('team_activities_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'team',
        adapter.quote('action'),
        adapter.quote('options'),
        'targetid',
        'createdat',
        'createdby'
    ]) }} as _airbyte_team_activities_hashid,
    tmp.*
from {{ ref('team_activities_ab2') }} tmp
-- team_activities
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Unpacks the raw team_bonus JSON blob (_airbyte_data) into one column per schema field.
-- depends_on: {{ source('unibag', '_airbyte_raw_team_bonus') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    {{ json_extract_scalar('_airbyte_data', ['team'], ['team']) }} as team,
    -- "user" and "percent" stay quoted via the adapter to avoid keyword clashes
    {{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
    {{ json_extract_scalar('_airbyte_data', ['status'], ['status']) }} as status,
    {{ json_extract_scalar('_airbyte_data', ['percent'], ['percent']) }} as {{ adapter.quote('percent') }},
    {{ json_extract_scalar('_airbyte_data', ['targetId'], ['targetId']) }} as targetid,
    {{ json_extract_scalar('_airbyte_data', ['teamName'], ['teamName']) }} as teamname,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['teamLevel'], ['teamLevel']) }} as teamlevel,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['commission'], ['commission']) }} as commission,
    {{ json_extract_scalar('_airbyte_data', ['teamMember'], ['teamMember']) }} as teammember,
    {{ json_extract_scalar('_airbyte_data', ['teamPromotion'], ['teamPromotion']) }} as teampromotion,
    -- Airbyte bookkeeping columns are passed through unchanged
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_team_bonus') }} as table_alias
-- team_bonus
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Casts every extracted team_bonus column to the SQL type implied by the stream's JSON schema.
-- depends_on: {{ ref('team_bonus_ab1') }}
select
    cast(_id as {{ dbt_utils.type_string() }}) as _id,
    cast(team as {{ dbt_utils.type_string() }}) as team,
    cast({{ adapter.quote('user') }} as {{ dbt_utils.type_string() }}) as {{ adapter.quote('user') }},
    cast(status as {{ dbt_utils.type_string() }}) as status,
    cast({{ adapter.quote('percent') }} as {{ dbt_utils.type_float() }}) as {{ adapter.quote('percent') }},
    cast(targetid as {{ dbt_utils.type_string() }}) as targetid,
    cast(teamname as {{ dbt_utils.type_string() }}) as teamname,
    cast(createdat as {{ dbt_utils.type_string() }}) as createdat,
    cast(teamlevel as {{ dbt_utils.type_float() }}) as teamlevel,
    cast(updatedat as {{ dbt_utils.type_string() }}) as updatedat,
    cast(commission as {{ dbt_utils.type_float() }}) as commission,
    cast(teammember as {{ dbt_utils.type_string() }}) as teammember,
    cast(teampromotion as {{ dbt_utils.type_string() }}) as teampromotion,
    -- Airbyte bookkeeping columns are passed through unchanged
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ ref('team_bonus_ab1') }}
-- team_bonus
where 1 = 1
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Adds a deterministic hash of all team_bonus columns, used downstream for change detection.
-- depends_on: {{ ref('team_bonus_ab2') }}
select
    {{ dbt_utils.surrogate_key([
        '_id',
        'team',
        adapter.quote('user'),
        'status',
        adapter.quote('percent'),
        'targetid',
        'teamname',
        'createdat',
        'teamlevel',
        'updatedat',
        'commission',
        'teammember',
        'teampromotion'
    ]) }} as _airbyte_team_bonus_hashid,
    tmp.*
from {{ ref('team_bonus_ab2') }} tmp
-- team_bonus
where 1 = 1
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
{{ config(
    indexes = [{'columns':['_airbyte_emitted_at'],'type':'btree'}],
    unique_key = '_airbyte_ab_id',
    schema = "_airbyte_unibag",
    tags = [ "top-level-intermediate" ]
) }}
-- Unpacks the raw team_members JSON blob (_airbyte_data) into one column per schema field.
-- depends_on: {{ source('unibag', '_airbyte_raw_team_members') }}
select
    {{ json_extract_scalar('_airbyte_data', ['_id'], ['_id']) }} as _id,
    -- "role" and "user" stay quoted via the adapter to avoid keyword clashes
    {{ json_extract_scalar('_airbyte_data', ['role'], ['role']) }} as {{ adapter.quote('role') }},
    {{ json_extract_scalar('_airbyte_data', ['team'], ['team']) }} as team,
    {{ json_extract_scalar('_airbyte_data', ['user'], ['user']) }} as {{ adapter.quote('user') }},
    {{ json_extract_scalar('_airbyte_data', ['isLeft'], ['isLeft']) }} as isleft,
    {{ json_extract_scalar('_airbyte_data', ['leftAt'], ['leftAt']) }} as leftat,
    {{ json_extract_scalar('_airbyte_data', ['joinedAt'], ['joinedAt']) }} as joinedat,
    {{ json_extract_scalar('_airbyte_data', ['roleCode'], ['roleCode']) }} as rolecode,
    {{ json_extract_scalar('_airbyte_data', ['createdAt'], ['createdAt']) }} as createdat,
    {{ json_extract_scalar('_airbyte_data', ['statistic'], ['statistic']) }} as statistic,
    {{ json_extract_scalar('_airbyte_data', ['updatedAt'], ['updatedAt']) }} as updatedat,
    {{ json_extract_scalar('_airbyte_data', ['isRemovedBy'], ['isRemovedBy']) }} as isremovedby,
    {{ json_extract_scalar('_airbyte_data', ['firstOrderAt'], ['firstOrderAt']) }} as firstorderat,
    {{ json_extract_scalar('_airbyte_data', ['searchString'], ['searchString']) }} as searchstring,
    {{ json_extract_scalar('_airbyte_data', ['isMemberHomegrown'], ['isMemberHomegrown']) }} as ismemberhomegrown,
    -- Airbyte bookkeeping columns are passed through unchanged
    _airbyte_ab_id,
    _airbyte_emitted_at,
    {{ current_timestamp() }} as _airbyte_normalized_at
from {{ source('unibag', '_airbyte_raw_team_members') }} as table_alias
-- team_members
where 1 = 1
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue