From c7b27fb1ce704605a105eea6ed116a386f604714 Mon Sep 17 00:00:00 2001
From: Kian-Meng Ang
Date: Sun, 23 Jun 2024 00:00:12 +0800
Subject: [PATCH] Fix typos

Found via `typos --hidden --format brief` and `codespell -H -L ines,tthe,nore`
---
 README.md                                     |  2 +-
 dbterd/adapters/algos/base.py                 | 10 +-
 dbterd/adapters/algos/test_relationship.py    |  2 +-
 dbterd/adapters/base.py                       |  2 +-
 dbterd/adapters/dbt_cloud/administrative.py   |  2 +-
 dbterd/adapters/dbt_cloud/graphql.py          |  4 +-
 dbterd/adapters/dbt_cloud/query.py            |  2 +-
 dbterd/adapters/dbt_core/dbt_invocation.py    |  2 +-
 dbterd/adapters/filter.py                     |  2 +-
 dbterd/cli/main.py                            |  6 +-
 dbterd/cli/params.py                          |  2 +-
 dbterd/helpers/file.py                        |  2 +-
 docs/index.md                                 |  2 +-
 docs/nav/guide/cli-references.md              | 14 +--
 .../download-artifact-from-a-job-run.md       |  6 +-
 .../dbt-cloud/download-artifact-from-a-job.md |  4 +-
 .../read-artifact-from-an-environment.md      |  4 +-
 docs/nav/guide/targets/generate-d2.md         |  2 +-
 docs/nav/guide/targets/generate-graphviz.md   |  2 +-
 docs/nav/guide/targets/generate-plantuml.md   |  2 +-
 docs/nav/metadata/relationship_type.md        |  2 +-
 mkdocs.yml                                    |  4 +-
 samples/dbt-constraints/1.3/manifest.json     | 36 +++----
 samples/dbt-constraints/manifest.json         | 36 +++----
 samples/dbtresto/manifest.json                | 94 +++++++++----------
 samples/facebookad/manifest.json              | 24 ++---
 samples/fivetranlog/manifest.json             | 38 ++++----
 samples/shopify/manifest.json                 | 48 +++++-----
 tests/unit/helpers/test_jsonify.py            |  6 +-
 29 files changed, 181 insertions(+), 181 deletions(-)

diff --git a/README.md b/README.md
index 6480eaa..c2b1e78 100644
--- a/README.md
+++ b/README.md
@@ -53,7 +53,7 @@ dbterd --version
     dbterd run -ad samples/dbt-constraints -a "test_relationship:(name:foreign_key|c_from:fk_column_name|c_to:pk_column_name)"
 
-    # your own sample without commiting to repo
+    # your own sample without committing to repo
     dbterd run -ad samples/local -rt model -rt source
 ```
 
diff --git a/dbterd/adapters/algos/base.py b/dbterd/adapters/algos/base.py
index 7f4b2c2..3af4358 100644
--- a/dbterd/adapters/algos/base.py
+++ b/dbterd/adapters/algos/base.py
@@ -99,7 +99,7 @@ def get_tables(manifest: Manifest, catalog: Catalog, **kwargs) -> List[Table]:
 def enrich_tables_from_relationships(
     tables: List[Table], relationships: List[Ref]
 ) -> List[Table]:
-    """Fullfill columns in Table due to `select *`
+    """Fulfill columns in Table due to `select *`
 
     Args:
         tables (List[Table]): List of Tables
@@ -301,7 +301,7 @@ def get_node_exposures_from_metadata(data=[], **kwargs):
         data (list, optional): Metadata result list. Defaults to [].
     Returns:
-        list: List of maping dict {table_name:..., exposure_name=...}
+        list: List of mapping dict {table_name:..., exposure_name=...}
     """
     exposures = []
     for data_item in data:
@@ -328,7 +328,7 @@ def get_node_exposures(manifest: Manifest) -> List[Dict[str, str]]:
         manifest (dict): dbt manifest json
 
     Returns:
-        list: List of maping dict {table_name:..., exposure_name=...}
+        list: List of mapping dict {table_name:..., exposure_name=...}
     """
     exposures = []
 
@@ -490,8 +490,8 @@ def get_relationships(manifest: Manifest, **kwargs) -> List[Ref]:
     return get_unique_refs(refs=refs)
 
 
-# def get_relationships_by_contraints(manifest: Manifest, **kwargs) -> List[Ref]:
-#     """Extract relationships from dbt artifacts based on model's configured contraints
+# def get_relationships_by_constraints(manifest: Manifest, **kwargs) -> List[Ref]:
+#     """Extract relationships from dbt artifacts based on model's configured constraints
 
 #     Args:
 #         manifest (dict): Manifest json
diff --git a/dbterd/adapters/algos/test_relationship.py b/dbterd/adapters/algos/test_relationship.py
index 7292953..74521b6 100644
--- a/dbterd/adapters/algos/test_relationship.py
+++ b/dbterd/adapters/algos/test_relationship.py
@@ -87,7 +87,7 @@ def parse(
         relationships=relationships, tables=tables
     )
 
-    # Fullfill columns in Tables (due to `select *`)
+    # Fulfill columns in Tables (due to `select *`)
     tables = base.enrich_tables_from_relationships(
         tables=tables, relationships=relationships
     )
diff --git a/dbterd/adapters/base.py b/dbterd/adapters/base.py
index 989296b..5ed6593 100644
--- a/dbterd/adapters/base.py
+++ b/dbterd/adapters/base.py
@@ -214,7 +214,7 @@ def __set_single_node_selection(
                 Defaults to None.
 
         Returns:
-            dict: Editted kwargs dict
+            dict: Edited kwargs dict
         """
         if not node_unique_id:
             return kwargs
diff --git a/dbterd/adapters/dbt_cloud/administrative.py b/dbterd/adapters/dbt_cloud/administrative.py
index ea7b6f3..df4f610 100644
--- a/dbterd/adapters/dbt_cloud/administrative.py
+++ b/dbterd/adapters/dbt_cloud/administrative.py
@@ -70,7 +70,7 @@ def download_artifact(self, artifact: str, artifacts_dir: str) -> bool:
             bool: True is success, False if any errors
         """
         artifact_api_endpoint = getattr(self, f"{artifact}_api_endpoint")
-        logger.debug(f"Dowloading...[URL: {artifact_api_endpoint}]")
+        logger.debug(f"Downloading...[URL: {artifact_api_endpoint}]")
         try:
             r = requests.get(url=artifact_api_endpoint, headers=self.request_headers)
             logger.debug(f"Completed [status: {r.status_code}]")
diff --git a/dbterd/adapters/dbt_cloud/graphql.py b/dbterd/adapters/dbt_cloud/graphql.py
index 2e8fb90..e03ad2c 100644
--- a/dbterd/adapters/dbt_cloud/graphql.py
+++ b/dbterd/adapters/dbt_cloud/graphql.py
@@ -7,7 +7,7 @@ class GraphQLHelper:
     """GraphQL Helper class"""
 
     def __init__(self, **kwargs) -> None:
-        """Initilize the required inputs:
+        """Initialize the required inputs:
         - Host URL
         - Bearer Token
         """
@@ -34,7 +34,7 @@ def query(self, query: str, **variables):
             query (str): query string
 
         Returns:
-            dict: Query data responsed. None if any exceptions
+            dict: Query data response. None if any exceptions
         """
         try:
             logger.debug(
diff --git a/dbterd/adapters/dbt_cloud/query.py b/dbterd/adapters/dbt_cloud/query.py
index bb1705a..43f4f22 100644
--- a/dbterd/adapters/dbt_cloud/query.py
+++ b/dbterd/adapters/dbt_cloud/query.py
@@ -7,7 +7,7 @@ class Query:
     """ERD Query file helper"""
 
     def __init__(self) -> None:
-        """Initilize the required input:
+        """Initialize the required input:
         - Query directory
         """
         self.dir = f"{os.path.dirname(os.path.realpath(__file__))}/include"
diff --git a/dbterd/adapters/dbt_core/dbt_invocation.py b/dbterd/adapters/dbt_core/dbt_invocation.py
index 260711d..8649097 100644
--- a/dbterd/adapters/dbt_core/dbt_invocation.py
+++ b/dbterd/adapters/dbt_core/dbt_invocation.py
@@ -52,7 +52,7 @@ def __invoke(self, runner_args: List[str] = []):
         return r.result
 
     def __construct_arguments(self, *args) -> List[str]:
-        """Enrich the dbt arguements with the based options
+        """Enrich the dbt arguments with the base options
 
         Returns:
             List[str]: Actual dbt arguments
diff --git a/dbterd/adapters/filter.py b/dbterd/adapters/filter.py
index 9d3fb3a..ce4b43c 100644
--- a/dbterd/adapters/filter.py
+++ b/dbterd/adapters/filter.py
@@ -66,7 +66,7 @@ def evaluate_rule(table: Table, rule: str) -> bool:
 
     Args:
         table (Table): Table object to be evaluated
-        rule (str): Rule defintion
+        rule (str): Rule definition
 
     Returns:
         bool: True if satisfied all rules
diff --git a/dbterd/cli/main.py b/dbterd/cli/main.py
index e92ddd2..2a706df 100644
--- a/dbterd/cli/main.py
+++ b/dbterd/cli/main.py
@@ -13,13 +13,13 @@
 
 # Programmatic invocation
 class dbterdRunner:
-    """Support runner for the programatic call"""
+    """Support runner for the programmatic call"""
 
     def __init__(self) -> None:
         pass
 
     def invoke(self, args: List[str]):
-        """Invoke a command of dbterd programatically
+        """Invoke a command of dbterd programmatically
 
         Args:
             args (List[str]): dbterd arguments
@@ -61,7 +61,7 @@ def dbterd(ctx, **kwargs):
 def run(ctx, **kwargs):
     """
     Generate ERD file from reading dbt artifact files,
-    optionally downloading from Administrative API (dbt Cloud) befor hands
+    optionally downloading from Administrative API (dbt Cloud) beforehand
     """
     Executor(ctx).run(**kwargs)
diff --git a/dbterd/cli/params.py b/dbterd/cli/params.py
index 89c3849..b6242b7 100644
--- a/dbterd/cli/params.py
+++ b/dbterd/cli/params.py
@@ -117,7 +117,7 @@ def run_params(func):
     )
     @click.option(
         "--dbt",
-        help="Flag to indicate the Selecton to follow dbt's one leveraging Programmatic Invocation",
+        help="Flag to indicate the Selection to follow dbt's one leveraging Programmatic Invocation",
         is_flag=True,
         default=False,
         show_default=True,
diff --git a/dbterd/helpers/file.py b/dbterd/helpers/file.py
index aadd57d..a362b92 100644
--- a/dbterd/helpers/file.py
+++ b/dbterd/helpers/file.py
@@ -72,7 +72,7 @@ def supports_long_paths(windll_name="ntdll") -> bool:  # pragma: no cover
     # Eryk Sun says to use `WinDLL('ntdll')` instead of `windll.ntdll` because
     # of pointer caching in a comment here:
     # https://stackoverflow.com/a/35097999/11262881
-    # I don't know exaclty what he means, but I am inclined to believe him as
+    # I don't know exactly what he means, but I am inclined to believe him as
     # he's pretty active on Python windows bugs!
     else:
         try:
diff --git a/docs/index.md b/docs/index.md
index 17fe94e..1ac15cc 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -61,7 +61,7 @@ dbterd --version
     dbterd run -ad samples/dbt-constraints -a "test_relationship:(name:foreign_key|c_from:fk_column_name|c_to:pk_column_name)"
 
-    # your own sample without commiting to repo
+    # your own sample without committing to repo
     dbterd run -ad samples/local -rt model -rt source
 ```
 
diff --git a/docs/nav/guide/cli-references.md b/docs/nav/guide/cli-references.md
index fa60971..6b990c6 100644
--- a/docs/nav/guide/cli-references.md
+++ b/docs/nav/guide/cli-references.md
@@ -63,7 +63,7 @@ Command to generate diagram-as-a-code file
 ### dbterd run --select (-s)
 
 Selection criteria.
-> Select all dbt models if not specified, supports mulitple options
+> Select all dbt models if not specified, supports multiple options
 
 Rules:
 
@@ -102,7 +102,7 @@ Rules:
 
 #### `AND` and `OR` logic
 
-- `AND` logic is applied to a single selection splitted by comma (,)
+- `AND` logic is applied to a single selection split by comma (,)
 - `OR` logic is applied to 2+ selection
 
 **Examples:**
@@ -122,7 +122,7 @@ Rules:
 ### dbterd run --exclude (-ns)
 
 Exclusion criteria. Rules are the same as Selection Criteria.
-> Do not exclude any dbt models if not specified, supports mulitple options
+> Do not exclude any dbt models if not specified, supports multiple options
 
 **Examples:**
 === "CLI"
@@ -226,7 +226,7 @@ In the above:
 - `c_to`: Configure the test metadata attribute (2) for the referenced column name(s). If (2)'s value is multiple columns, it will concat them all with `_and` wording. Default to `field`
 
 !!! tip "For example, if you would use `dbt-constraints` package"
-    The [dbt-constraints](https://hub.getdbt.com/snowflake-labs/dbt_constraints/latest/) package is using different name of test which is close to the contraint names, in this case, you would need to customize the input string here:
+    The [dbt-constraints](https://hub.getdbt.com/snowflake-labs/dbt_constraints/latest/) package uses different test names, which are close to the constraint names; in this case, you would need to customize the input string here:
 
     ```bash
     dbterd run \
@@ -300,7 +300,7 @@ Specified dbt catalog.json version
 ### dbterd run --resource-type (-rt)
 
 Specified dbt resource type(seed, model, source, snapshot).
-> Default to `["model"]`, supports mulitple options
+> Default to `["model"]`, supports multiple options
 
 **Examples:**
 === "CLI"
@@ -316,7 +316,7 @@ Specified dbt resource type(seed, model, source, snapshot).
 
 ### dbterd run --dbt
 
-Flag to indicate the Selecton to follow dbt's one leveraging Programmatic Invocation
+Flag to indicate the Selection to follow dbt's one leveraging Programmatic Invocation
 > Default to `False`
 
 **Examples:**
@@ -335,7 +335,7 @@ Flag to indicate the Selection to follow dbt's one leveraging Programmatic Invoca
 
 ### dbterd run --dbt --dbt-auto-artifact
 
-Flag to indicate force running `dbt docs generate` to the targetted project in order to produce the dbt artifact files.
+Flag to indicate force running `dbt docs generate` to the targeted project in order to produce the dbt artifact files.
 
 This option have to be enabled together with `--dbt` option, and will override the value of `--artifacts-dir` to be using the `/target` dir of the value of `--dbt-project-dir`.
diff --git a/docs/nav/guide/dbt-cloud/download-artifact-from-a-job-run.md b/docs/nav/guide/dbt-cloud/download-artifact-from-a-job-run.md index 8efca4c..32fa24a 100644 --- a/docs/nav/guide/dbt-cloud/download-artifact-from-a-job-run.md +++ b/docs/nav/guide/dbt-cloud/download-artifact-from-a-job-run.md @@ -65,7 +65,7 @@ $env:DBTERD_DBT_CLOUD_HOST_URL="your_value" # optional, default = cloud.getdbt.c $env:DBTERD_DBT_CLOUD_API_VERSION="your_value" # optional, default = v2 ``` -## 2. Genrate ERD file +## 2. Generate ERD file We're going to use `--dbt-cloud` option to tell `dbterd` to use dbt Cloud API with all above variables. @@ -83,9 +83,9 @@ and then, here is the sample console log: ```log dbterd - INFO - Run with dbterd==1.0.0 (main.py:54) dbterd - INFO - Using dbt project dir at: C:\Sources\dbterd (base.py:46) -dbterd - INFO - Dowloading...[URL: https://hidden/api/v2/accounts/hidden/runs/hidden/artifacts/manifest.json] (dbt_cloud.py:68) +dbterd - INFO - Downloading...[URL: https://hidden/api/v2/accounts/hidden/runs/hidden/artifacts/manifest.json] (dbt_cloud.py:68) dbterd - INFO - Completed [status: 200] (dbt_cloud.py:71) -dbterd - INFO - Dowloading...[URL: https://hidden/api/v2/accounts/hidden/runs/hidden/artifacts/catalog.json] (dbt_cloud.py:68) +dbterd - INFO - Downloading...[URL: https://hidden/api/v2/accounts/hidden/runs/hidden/artifacts/catalog.json] (dbt_cloud.py:68) dbterd - INFO - Completed [status: 200] (dbt_cloud.py:71) dbterd - INFO - Using dbt artifact dir at: hidden (base.py:73) dbterd - INFO - Collected 4 table(s) and 3 relationship(s) (test_relationship.py:59) diff --git a/docs/nav/guide/dbt-cloud/download-artifact-from-a-job.md b/docs/nav/guide/dbt-cloud/download-artifact-from-a-job.md index d17a021..b063626 100644 --- a/docs/nav/guide/dbt-cloud/download-artifact-from-a-job.md +++ b/docs/nav/guide/dbt-cloud/download-artifact-from-a-job.md @@ -40,9 +40,9 @@ And the sample logs: ```log dbterd - INFO - Run with dbterd==1.0.0 (main.py:54) dbterd - INFO - Using dbt project dir at: C:\Sources\dbterd (base.py:46) -dbterd - INFO - Dowloading...[URL: https://hidden/api/v2/accounts/hidden/jobs/hidden/artifacts/manifest.json] (dbt_cloud.py:68) +dbterd - INFO - Downloading...[URL: https://hidden/api/v2/accounts/hidden/jobs/hidden/artifacts/manifest.json] (dbt_cloud.py:68) dbterd - INFO - Completed [status: 200] (dbt_cloud.py:71) -dbterd - INFO - Dowloading...[URL: https://hidden/api/v2/accounts/hidden/jobs/hidden/artifacts/catalog.json] (dbt_cloud.py:68) +dbterd - INFO - Downloading...[URL: https://hidden/api/v2/accounts/hidden/jobs/hidden/artifacts/catalog.json] (dbt_cloud.py:68) dbterd - INFO - Completed [status: 200] (dbt_cloud.py:71) dbterd - INFO - Using dbt artifact dir at: hidden (base.py:73) dbterd - INFO - Collected 4 table(s) and 3 relationship(s) (test_relationship.py:59) diff --git a/docs/nav/guide/dbt-cloud/read-artifact-from-an-environment.md b/docs/nav/guide/dbt-cloud/read-artifact-from-an-environment.md index 84d6796..c0edb3c 100644 --- a/docs/nav/guide/dbt-cloud/read-artifact-from-an-environment.md +++ b/docs/nav/guide/dbt-cloud/read-artifact-from-an-environment.md @@ -64,7 +64,7 @@ $env:DBTERD_DBT_CLOUD_SERVICE_TOKEN="your_value" $env:DBTERD_DBT_CLOUD_ENVIRONMENT_ID="your_value" ``` -## 2. Genrate ERD file +## 2. Generate ERD file We're going to use a new command as `dbterd run-metadata` to tell `dbterd` to use dbt Cloud Discovery API with all above variables. 
@@ -74,7 +74,7 @@ The command will be looks like:
 dbterd run-metadata [-s ]
 ```
 
-> Behind the scenes, it will try use to the ERD GraphQL query buit-in at [include/erd_query.gql](https://github.com/datnguye/dbterd/tree/main/dbterd/adapters/dbt_cloud/include/erd_query.gql)
+> Behind the scenes, it will try to use the built-in ERD GraphQL query at [include/erd_query.gql](https://github.com/datnguye/dbterd/tree/main/dbterd/adapters/dbt_cloud/include/erd_query.gql)
 
 and then, here is the sample console log:
 
diff --git a/docs/nav/guide/targets/generate-d2.md b/docs/nav/guide/targets/generate-d2.md
index 559a51d..bced763 100644
--- a/docs/nav/guide/targets/generate-d2.md
+++ b/docs/nav/guide/targets/generate-d2.md
@@ -27,7 +27,7 @@ curl -fsSL https://d2lang.com/install.sh | sh -s --
 
-### 3. Embeded into Markdown
+### 3. Embed into Markdown
 
 ```markdown
 # Sample D2 ERD
diff --git a/docs/nav/guide/targets/generate-graphviz.md b/docs/nav/guide/targets/generate-graphviz.md
index 8c3984c..9fa3528 100644
--- a/docs/nav/guide/targets/generate-graphviz.md
+++ b/docs/nav/guide/targets/generate-graphviz.md
@@ -23,7 +23,7 @@ sudo apt install graphviz
 
-### 3. Embeded into Markdown
+### 3. Embed into Markdown
 
 ```markdown
 # Sample GraphViz ERD
diff --git a/docs/nav/guide/targets/generate-plantuml.md b/docs/nav/guide/targets/generate-plantuml.md
index 4eafd3e..72e1f9d 100644
--- a/docs/nav/guide/targets/generate-plantuml.md
+++ b/docs/nav/guide/targets/generate-plantuml.md
@@ -14,7 +14,7 @@
 - Paste the PlantUML content generated as above
 - Wait for a second and get the URL
 
-### 3. Embeded the PlantUML URL into Markdown
+### 3. Embed the PlantUML URL into Markdown
 
 ```markdown
 ![](https://www.plantuml.com/plantuml/dpng/{your-hash})
diff --git a/docs/nav/metadata/relationship_type.md b/docs/nav/metadata/relationship_type.md
index 2c9455f..f954b48 100644
--- a/docs/nav/metadata/relationship_type.md
+++ b/docs/nav/metadata/relationship_type.md
@@ -21,7 +21,7 @@ Default value: `many-to-one` if the meta config is not specified
 
 List of accepted values:
 
-| Relationship Type | Programatic Symbol |
+| Relationship Type | Programmatic Symbol |
 |--------|--------|
 | one-to-many | 1n |
 | zero-to-many | 0n |
diff --git a/mkdocs.yml b/mkdocs.yml
index d2d3ec0..b92edfd 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,7 +1,7 @@
 site_name: DaaC from dbt artifacts
 site_url: https://datnguye.github.io/dbterd/
 site_author: Dat Nguyen
-site_description: Offical documentation of `dbterd` CLI
+site_description: Official documentation of `dbterd` CLI
 repo_name: datnguye/dbterd
 repo_url: https://github.com/datnguye/dbterd
diff --git a/samples/dbt-constraints/1.3/manifest.json b/samples/dbt-constraints/1.3/manifest.json
index 0025cdc..21f7f71 100644
--- a/samples/dbt-constraints/1.3/manifest.json
+++ b/samples/dbt-constraints/1.3/manifest.json
@@ -935,7 +935,7 @@
       },
       "integration_id": {
         "name": "integration_id",
-        "description": "Concatenation of PK colums for the unique and not_null tests",
+        "description": "Concatenation of PK columns for the unique and not_null tests",
         "meta": {},
         "data_type": null,
         "quote": null,
@@ -1042,7 +1042,7 @@
       },
       "l_______________integration_id": {
         "name": "l_______________integration_id",
-        "description": "Concatenation of PK colums for the unique and not_null tests",
+        "description": "Concatenation of PK columns for the unique and not_null tests",
         "meta": {},
         "data_type": null,
         "quote": null,
@@ -1172,7 +1172,7 @@
       "extra_ctes": [],
       "relation_name":
"\"dbt_constraints\".\"dbt_dat\".\"fact_order_line_missing_orders\"" }, - "test.dbt_constraints_integration_tests.singlular_test": { + "test.dbt_constraints_integration_tests.singular_test": { "compiled": true, "resource_type": "test", "depends_on": { @@ -1201,17 +1201,17 @@ "schema": "dbt_dat_dbt_test__audit", "fqn": [ "dbt_constraints_integration_tests", - "singlular_test" + "singular_test" ], - "unique_id": "test.dbt_constraints_integration_tests.singlular_test", + "unique_id": "test.dbt_constraints_integration_tests.singular_test", "raw_code": "SELECT *\r\nFROM {{ ref('dim_part') }}\r\nWHERE 1 = 2", "language": "sql", "package_name": "dbt_constraints_integration_tests", "root_path": "C:\\Users\\DAT\\Documents\\Sources\\dbt_constraints\\integration_tests", - "path": "singlular_test.sql", + "path": "singular_test.sql", "original_file_path": "tests\\singlular_test.sql", - "name": "singlular_test", - "alias": "singlular_test", + "name": "singular_test", + "alias": "singular_test", "checksum": { "name": "sha256", "checksum": "78445beeca943fc33f9942b9936ca3139bb8d8d521cffd6fafbb23223ef6a99f" @@ -14580,7 +14580,7 @@ "path": "macros\\etc\\datetime.sql", "original_file_path": "macros\\etc\\datetime.sql", "name": "dates_in_range", - "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", + "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -15324,7 +15324,7 @@ "path": "macros\\materializations\\models\\incremental\\incremental.sql", "original_file_path": "macros\\materializations\\models\\incremental\\incremental.sql", "name": "materialization_incremental_default", - "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = 
load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", + "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -16043,7 +16043,7 @@ "path": "macros\\materializations\\models\\table\\table.sql", "original_file_path": "macros\\materializations\\models\\table\\table.sql", "name": "materialization_table_default", - "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = 
make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", + "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -16265,7 +16265,7 @@ "path": "macros\\materializations\\models\\view\\view.sql", "original_file_path": "macros\\materializations\\models\\view\\view.sql", "name": "materialization_view_default", - "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", + "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": { @@ -16634,7 +16634,7 @@ "path": "macros\\materializations\\seeds\\seed.sql", "original_file_path": "macros\\materializations\\seeds\\seed.sql", "name": "materialization_seed_default", - "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = 
this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -16955,7 +16955,7 @@ "path": "macros\\materializations\\snapshots\\snapshot.sql", "original_file_path": "macros\\materializations\\snapshots\\snapshot.sql", "name": "materialization_snapshot_default", - "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n 
identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparison later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = 
strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}",
       "resource_type": "macro",
       "tags": [],
       "depends_on": {
@@ -23815,7 +23815,7 @@
       "path": "macros\\sql\\width_bucket.sql",
       "original_file_path": "macros\\sql\\width_bucket.sql",
       "name": "default__width_bucket",
-      "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is eaxtly at the bucket egde\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}",
+      "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is exactly at the bucket edge\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size,
dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -24079,7 +24079,7 @@ "model.dbt_constraints_integration_tests.fact_order_line_missing_orders": [ "seed.dbt_constraints_integration_tests.lineitem" ], - "test.dbt_constraints_integration_tests.singlular_test": [ + "test.dbt_constraints_integration_tests.singular_test": [ "model.dbt_constraints_integration_tests.dim_part" ], "seed.dbt_constraints_integration_tests.customer": [], @@ -24505,7 +24505,7 @@ "test.dbt_constraints_integration_tests.dbt_constraints_unique_key_dim_part_p_partkey.2f0ed17851", "test.dbt_constraints_integration_tests.dbt_constraints_unique_key_dim_part_p_partkey_seq.fc6c0ddfbf", "test.dbt_constraints_integration_tests.not_null_dim_part_p_partkey.4ed69f1f64", - "test.dbt_constraints_integration_tests.singlular_test", + "test.dbt_constraints_integration_tests.singular_test", "test.dbt_constraints_integration_tests.unique_dim_part_p_partkey.a98e3dbb8d", "test.dbt_constraints_integration_tests.unique_dim_part_p_partkey_seq.469794dc4b" ], @@ -24542,7 +24542,7 @@ "test.dbt_constraints_integration_tests.not_null_fact_order_line_missing_orders_integration_id.9736acc3de", "test.dbt_constraints_integration_tests.unique_fact_order_line_missing_orders_integration_id.d9baa91679" ], - "test.dbt_constraints_integration_tests.singlular_test": [], + "test.dbt_constraints_integration_tests.singular_test": [], "seed.dbt_constraints_integration_tests.customer": [ "model.dbt_constraints_integration_tests.dim_customers", "test.dbt_constraints_integration_tests.dbt_constraints_primary_key_customer_c_custkey.12989fe240", diff --git a/samples/dbt-constraints/manifest.json b/samples/dbt-constraints/manifest.json index 14d1f07..cabac2c 100644 --- a/samples/dbt-constraints/manifest.json +++ b/samples/dbt-constraints/manifest.json @@ -1055,7 +1055,7 @@ }, "integration_id": { "name": "integration_id", - "description": "Concatenation of PK colums for the unique and not_null tests", + "description": "Concatenation of PK columns for the unique and not_null tests", "meta": {}, "data_type": null, "constraints": [], @@ -1187,7 +1187,7 @@ }, "l_______________integration_id": { "name": "l_______________integration_id", - "description": "Concatenation of PK colums for the unique and not_null tests", + "description": "Concatenation of PK columns for the unique and not_null tests", "meta": {}, "data_type": null, "constraints": [], @@ -1362,20 +1362,20 @@ "version": null, "latest_version": null }, - "test.dbt_constraints_integration_tests.singlular_test": { + "test.dbt_constraints_integration_tests.singular_test": { "database": "dbt_constraints", "schema": "dbt_dat_dbt_test__audit", - "name": "singlular_test", + "name": "singular_test", "resource_type": "test", "package_name": "dbt_constraints_integration_tests", - "path": "singlular_test.sql", + "path": "singular_test.sql", "original_file_path": "tests\\singlular_test.sql", - "unique_id": "test.dbt_constraints_integration_tests.singlular_test", + "unique_id": "test.dbt_constraints_integration_tests.singular_test", "fqn": [ "dbt_constraints_integration_tests", - "singlular_test" + "singular_test" ], - "alias": "singlular_test", + "alias": "singular_test", "checksum": { "name": "sha256", "checksum": "92d2593ae78d1f9b0cb99f3760cc5afcc389284cab3e774abff4a0ba86bb53a8" @@ -15530,7 
+15530,7 @@ "path": "macros\\etc\\datetime.sql", "original_file_path": "macros\\etc\\datetime.sql", "unique_id": "macro.dbt.dates_in_range", - "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", + "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": { "macros": [ "macro.dbt.convert_datetime" @@ -16214,7 +16214,7 @@ "path": "macros\\materializations\\models\\incremental\\incremental.sql", "original_file_path": "macros\\materializations\\models\\incremental\\incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", - "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", + "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else 
existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": { "macros": [ "macro.dbt.load_cached_relation", @@ -17119,7 +17119,7 @@ "path": "macros\\materializations\\models\\table\\table.sql", "original_file_path": "macros\\materializations\\models\\table\\table.sql", "unique_id": "macro.dbt.materialization_table_default", - "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
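(Aside for readers tracing the `incremental` materialization blob patched just above: the control flow is easier to audit outside one long Jinja string. The Python below is a loose, hypothetical paraphrase of its three branches only, a sketch assuming illustrative callables; the function and relation names are not dbt's or dbterd's actual Python API.)

```python
from typing import Callable, Optional, Tuple

def incremental_build_plan(
    existing_relation: Optional[str],
    full_refresh_mode: bool,
    create_target: Callable[[], str],
    create_intermediate: Callable[[], str],
    merge_via_strategy: Callable[[], str],
) -> Tuple[str, bool]:
    """Outline of the three branches in the incremental materialization above.

    Returns the SQL for the 'main' statement and whether the build goes
    through an intermediate relation that must later be swapped into place.
    """
    if existing_relation is None:
        # first run: create the target table directly
        return create_target(), False
    if full_refresh_mode:
        # rebuild into an intermediate relation, then rename over the target
        return create_intermediate(), True
    # normal run: load new rows into a temp relation and let the configured
    # incremental strategy (e.g. the default merge) fold them into the target
    return merge_via_strategy(), False

# e.g. a first run takes the plain CREATE branch:
sql, need_swap = incremental_build_plan(
    None, False,
    create_target=lambda: "create table t as select ...",
    create_intermediate=lambda: "create table t__dbt_tmp as select ...",
    merge_via_strategy=lambda: "merge into t using t__dbt_tmp ...",
)
assert not need_swap and sql.startswith("create table t ")
```

The macro's remaining work (grants, persist_docs, index creation, dropping backups after COMMIT) is the same in all three branches, which is why only the build step varies.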
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", + "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": { "macros": [ "macro.dbt.load_cached_relation", @@ -17327,7 +17327,7 @@ "path": "macros\\materializations\\models\\view\\view.sql", "original_file_path": "macros\\materializations\\models\\view\\view.sql", "unique_id": "macro.dbt.materialization_view_default", - "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. 
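(One more aside, on the `table` materialization whose diff ends here: the rename dance, move the old table to a backup, promote the intermediate, drop the backup only after COMMIT, is what makes the rebuild near-atomic. A minimal sketch follows, assuming illustrative `rename`/`drop_if_exists` callables and the `__dbt_tmp`/`__dbt_backup` suffixes for illustration; none of this is a real dbt Python API.)

```python
from typing import Callable, Optional

def swap_in_rebuilt_table(
    existing: Optional[str],
    target: str,
    rename: Callable[[str, str], None],
    drop_if_exists: Callable[[str], None],
) -> None:
    """Sketch of the rename sequence ending the table materialization:
    the fresh build only replaces the target once it has fully succeeded,
    and the old table survives until after the commit."""
    intermediate = f"{target}__dbt_tmp"    # built by the 'main' statement earlier
    backup = f"{target}__dbt_backup"
    if existing is not None:
        rename(existing, backup)           # move the old table out of the way
    rename(intermediate, target)           # promote the fresh build
    # ... grants, persist_docs, COMMIT happen here ...
    drop_if_exists(backup)                 # finally drop the old table
```

Typing the backup relation after the existing relation (the comment block in the view materialization below makes the same point) ensures the eventual cleanup issues the correct `drop view` vs `drop table`.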
Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", + "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequently drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": { "macros": [ "macro.dbt.load_cached_relation", @@ -17668,7 +17668,7 @@ "path": "macros\\materializations\\seeds\\seed.sql", "original_file_path": "macros\\materializations\\seeds\\seed.sql", "unique_id": "macro.dbt.materialization_seed_default", - "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation 
= this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": { "macros": [ "macro.dbt.should_full_refresh", @@ -17965,7 +17965,7 @@ "path": "macros\\materializations\\snapshots\\snapshot.sql", "original_file_path": "macros\\materializations\\snapshots\\snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", - "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n 
database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparison later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) 
}}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": { "macros": [ "macro.dbt.get_or_create_relation", @@ -24356,7 +24356,7 @@ "path": "macros\\sql\\width_bucket.sql", "original_file_path": "macros\\sql\\width_bucket.sql", "unique_id": "macro.dbt_utils.default__width_bucket", - "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is eaxtly at the bucket egde\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}", + "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is exactly at the bucket edge\n case\n when\n mod(\n {{ dbt.safe_cast(expr, 
dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}", "depends_on": { "macros": [ "macro.dbt.safe_cast", @@ -24605,7 +24605,7 @@ "model.dbt_constraints_integration_tests.fact_order_line_missing_orders": [ "seed.dbt_constraints_integration_tests.lineitem" ], - "test.dbt_constraints_integration_tests.singlular_test": [ + "test.dbt_constraints_integration_tests.singular_test": [ "model.dbt_constraints_integration_tests.dim_part" ], "seed.dbt_constraints_integration_tests.customer": [], @@ -25031,7 +25031,7 @@ "test.dbt_constraints_integration_tests.dbt_constraints_unique_key_dim_part_p_partkey.2f0ed17851", "test.dbt_constraints_integration_tests.dbt_constraints_unique_key_dim_part_p_partkey_seq.fc6c0ddfbf", "test.dbt_constraints_integration_tests.not_null_dim_part_p_partkey.4ed69f1f64", - "test.dbt_constraints_integration_tests.singlular_test", + "test.dbt_constraints_integration_tests.singular_test", "test.dbt_constraints_integration_tests.unique_dim_part_p_partkey.a98e3dbb8d", "test.dbt_constraints_integration_tests.unique_dim_part_p_partkey_seq.469794dc4b" ], @@ -25068,7 +25068,7 @@ "test.dbt_constraints_integration_tests.not_null_fact_order_line_missing_orders_integration_id.9736acc3de", "test.dbt_constraints_integration_tests.unique_fact_order_line_missing_orders_integration_id.d9baa91679" ], - "test.dbt_constraints_integration_tests.singlular_test": [], + "test.dbt_constraints_integration_tests.singular_test": [], "seed.dbt_constraints_integration_tests.customer": [ "model.dbt_constraints_integration_tests.dim_customers", "test.dbt_constraints_integration_tests.dbt_constraints_primary_key_customer_c_custkey.12989fe240", diff --git a/samples/dbtresto/manifest.json b/samples/dbtresto/manifest.json index 83956dd..3fd6ace 100644 --- a/samples/dbtresto/manifest.json +++ b/samples/dbtresto/manifest.json @@ -1996,7 +1996,7 @@ }, "occurrence_pos_1": { "name": "occurrence_pos_1", - "description": "Times that number has appeared at postition 1", + "description": "Times that number has appeared at position 1", "meta": {}, "data_type": null, "quote": null, @@ -2004,7 +2004,7 @@ }, "occurrence_pos_2": { "name": "occurrence_pos_2", - "description": "Tmes that number has appeared at postition 2", + "description": "Times that number has appeared at position 2", "meta": {}, "data_type": null, "quote": null, @@ -2012,7 +2012,7 @@ }, "occurrence_pos_3": { "name": "occurrence_pos_3", - "description": "Times that number has appeared at postition 3", + "description": "Times that number has appeared at position 3", "meta": {}, "data_type": null, "quote": null, @@ -2020,7 +2020,7 @@ }, "occurrence_pos_4": { "name": "occurrence_pos_4", - "description": "Times that number has appeared at postition 4", + "description": "Times that number has appeared at position 4", "meta": {}, "data_type": null, "quote": null, @@ -2028,7 +2028,7 @@ }, "occurrence_pos_5": { "name": "occurrence_pos_5", - "description": "Times that number has appeared at postition 5", + "description": "Times that number has appeared at position 5", "meta": {}, "data_type": null, "quote": null, @@ -2036,7 +2036,7 @@ }, "occurrence_pos_6": { "name": "occurrence_pos_6", - "description": "Times that number has appeared at postition 6", + "description": "Times that number has appeared at position 6", "meta": {}, "data_type": null, 
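(For readers checking the two `width_bucket` hunks above: the macro compiles to a single SQL expression, and its arithmetic is easiest to verify in plain Python. The sketch below is a minimal, hypothetical re-implementation of that expression only; `width_bucket` here is an illustrative name, not part of dbterd's or dbt-utils' Python API, and Python's float `fmod` only approximates SQL's numeric `mod` for the tie-break test.)

```python
import math

def width_bucket(expr: float, min_value: float, max_value: float, num_buckets: int) -> int:
    """Plain-Python rendering of the SQL emitted by default__width_bucket."""
    bin_size = (max_value - min_value) / num_buckets
    # break ties when the amount is exactly at a bucket edge (the CASE/MOD block)
    tie_break = 1 if math.fmod(expr, bin_size) == 0 else 0
    # anything over max_value is clamped into the N+1 overflow bucket (LEAST/CEIL)
    return tie_break + min(math.ceil((expr - min_value) / bin_size), num_buckets + 1)

# with min=0, max=100 and 10 buckets of width 10:
assert width_bucket(15, 0, 100, 10) == 2    # interior value
assert width_bucket(20, 0, 100, 10) == 3    # edge value is pushed up a bucket
assert width_bucket(155, 0, 100, 10) == 11  # overflow clamps to bucket N+1
```

Note the tie-break term is added after the clamp, so an overflow value that also sits on an edge can still land one past `num_buckets + 1`; that quirk is in the SQL expression itself, not introduced by this sketch.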
"quote": null, @@ -2044,7 +2044,7 @@ }, "occurrence_pos_7": { "name": "occurrence_pos_7", - "description": "Times that number has appeared at postition 7", + "description": "Times that number has appeared at position 7", "meta": {}, "data_type": null, "quote": null, @@ -12426,7 +12426,7 @@ "column_name": "prize_name", "file_key_name": "models.dim_prize" }, - "test.dbt_resto.accepted_values_dim_prize_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize.a9ef660412": { + "test.dbt_resto.accepted_values_dim_prize_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize.a9ef660412": { "test_metadata": { "name": "accepted_values", "kwargs": { @@ -12477,16 +12477,16 @@ "power655", "mart", "schema", - "accepted_values_dim_prize_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize" + "accepted_values_dim_prize_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize" ], - "unique_id": "test.dbt_resto.accepted_values_dim_prize_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize.a9ef660412", + "unique_id": "test.dbt_resto.accepted_values_dim_prize_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize.a9ef660412", "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_dim_prize_8e42716fdcf658a4b79221eeb30200c7\") }}", "language": "sql", "package_name": "dbt_resto", "root_path": "C:\\Users\\DAT\\Documents\\Sources\\dbt-resto\\integration_tests\\dbt_packages\\dbt_resto", "path": "accepted_values_dim_prize_8e42716fdcf658a4b79221eeb30200c7.sql", "original_file_path": "models\\vietlot\\power655\\mart\\schema\\dim_prize.yml", - "name": "accepted_values_dim_prize_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize", + "name": "accepted_values_dim_prize_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize", "alias": "accepted_values_dim_prize_8e42716fdcf658a4b79221eeb30200c7", "checksum": { "name": "none", @@ -15031,7 +15031,7 @@ }, "config": { "enabled": true, - "alias": "relationships_fact_number_9d907b53d09872afe03d5c8b375e88b8", + "alias": "relationships_fact_number_9d907b53d09872safe03d5c8b375e88b8", "schema": "dbt_test__audit", "database": null, "tags": [], @@ -15056,14 +15056,14 @@ "relationships_fact_number_last_appearance_pos_4__date_key__ref_dim_date_" ], "unique_id": "test.dbt_resto.relationships_fact_number_last_appearance_pos_4__date_key__ref_dim_date_.05d8a30d70", - "raw_code": "{{ test_relationships(**_dbt_generic_test_kwargs) }}{{ config(where=\"last_appearance_pos_4 is not null\",alias=\"relationships_fact_number_9d907b53d09872afe03d5c8b375e88b8\") }}", + "raw_code": "{{ test_relationships(**_dbt_generic_test_kwargs) }}{{ config(where=\"last_appearance_pos_4 is not null\",alias=\"relationships_fact_number_9d907b53d09872safe03d5c8b375e88b8\") }}", "language": "sql", "package_name": "dbt_resto", "root_path": "C:\\Users\\DAT\\Documents\\Sources\\dbt-resto\\integration_tests\\dbt_packages\\dbt_resto", - "path": "relationships_fact_number_9d907b53d09872afe03d5c8b375e88b8.sql", + "path": "relationships_fact_number_9d907b53d09872safe03d5c8b375e88b8.sql", "original_file_path": "models\\vietlot\\power655\\mart\\schema\\fact_number.yml", "name": "relationships_fact_number_last_appearance_pos_4__date_key__ref_dim_date_", - "alias": "relationships_fact_number_9d907b53d09872afe03d5c8b375e88b8", + "alias": "relationships_fact_number_9d907b53d09872safe03d5c8b375e88b8", "checksum": { "name": "none", "checksum": "" @@ -15092,7 +15092,7 @@ "deferred": false, 
"unrendered_config": { "where": "last_appearance_pos_4 is not null", - "alias": "relationships_fact_number_9d907b53d09872afe03d5c8b375e88b8" + "alias": "relationships_fact_number_9d907b53d09872safe03d5c8b375e88b8" }, "created_at": 1694070226.7003434, "compiled_code": "\n \n \n\nwith child as (\n select last_appearance_pos_4 as from_field\n from (select * from \"msdb\".\"mart\".\"fact_number\" where last_appearance_pos_4 is not null) dbt_subquery\n where last_appearance_pos_4 is not null\n),\n\nparent as (\n select date_key as to_field\n from \"msdb\".\"mart\".\"dim_date\"\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n\n", @@ -15729,7 +15729,7 @@ "column_name": "last_box_result_numbers", "file_key_name": "models.fact_number_forecast" }, - "test.dbt_resto.not_null_fact_number_forecast_forecast_1.ded058c32e": { + "test.dbt_resto.not_null_fact_number_forecast_forecast_1.dead058c32e": { "test_metadata": { "name": "not_null", "kwargs": { @@ -15775,7 +15775,7 @@ "schema", "not_null_fact_number_forecast_forecast_1" ], - "unique_id": "test.dbt_resto.not_null_fact_number_forecast_forecast_1.ded058c32e", + "unique_id": "test.dbt_resto.not_null_fact_number_forecast_forecast_1.dead058c32e", "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "package_name": "dbt_resto", @@ -19157,7 +19157,7 @@ "column_name": "sk_box", "file_key_name": "models.staging_power655_box" }, - "test.dbt_resto.not_null_staging_power655_box_sk_box.320d0ede5f": { + "test.dbt_resto.not_null_staging_power655_box_sk_box.320d0edge5f": { "test_metadata": { "name": "not_null", "kwargs": { @@ -19202,7 +19202,7 @@ "staging", "not_null_staging_power655_box_sk_box" ], - "unique_id": "test.dbt_resto.not_null_staging_power655_box_sk_box.320d0ede5f", + "unique_id": "test.dbt_resto.not_null_staging_power655_box_sk_box.320d0edge5f", "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "package_name": "dbt_resto", @@ -21414,7 +21414,7 @@ "column_name": "prize_name", "file_key_name": "models.staging_power655_box_detail" }, - "test.dbt_resto.accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize.8d59a2292d": { + "test.dbt_resto.accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize.8d59a2292d": { "test_metadata": { "name": "accepted_values", "kwargs": { @@ -21464,16 +21464,16 @@ "vietlot", "power655", "staging", - "accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize" + "accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize" ], - "unique_id": "test.dbt_resto.accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize.8d59a2292d", + "unique_id": "test.dbt_resto.accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize.8d59a2292d", "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_staging_power6_5de36d9ba4c0de74dccf230338059140\") }}", "language": "sql", "package_name": "dbt_resto", "root_path": "C:\\Users\\DAT\\Documents\\Sources\\dbt-resto\\integration_tests\\dbt_packages\\dbt_resto", "path": "accepted_values_staging_power6_5de36d9ba4c0de74dccf230338059140.sql", "original_file_path": 
"models\\vietlot\\power655\\staging\\staging_power655_box_detail.yml", - "name": "accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize", + "name": "accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize", "alias": "accepted_values_staging_power6_5de36d9ba4c0de74dccf230338059140", "checksum": { "name": "none", @@ -23196,7 +23196,7 @@ "path": "macros\\materializations\\snapshots\\snapshot.sql", "original_file_path": "macros\\materializations\\snapshots\\snapshot.sql", "name": "sqlserver__create_columns", - "macro_sql": "{% macro sqlserver__create_columns(relation, columns) %}\n {# default__ macro uses \"add column\"\n TSQL preferes just \"add\"\n #}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", + "macro_sql": "{% macro sqlserver__create_columns(relation, columns) %}\n {# default__ macro uses \"add column\"\n TSQL prefers just \"add\"\n #}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -25685,7 +25685,7 @@ "path": "macros\\etc\\datetime.sql", "original_file_path": "macros\\etc\\datetime.sql", "name": "dates_in_range", - "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", + "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -26429,7 +26429,7 @@ "path": "macros\\materializations\\models\\incremental\\incremental.sql", "original_file_path": 
"macros\\materializations\\models\\incremental\\incremental.sql", "name": "materialization_incremental_default", - "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", + "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -27148,7 +27148,7 @@ "path": "macros\\materializations\\models\\table\\table.sql", "original_file_path": "macros\\materializations\\models\\table\\table.sql", "name": "materialization_table_default", - "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = 
make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", + "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -27370,7 +27370,7 @@ "path": "macros\\materializations\\models\\view\\view.sql", "original_file_path": "macros\\materializations\\models\\view\\view.sql", "name": "materialization_view_default", - "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}",
+      "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequently drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": { @@ -27739,7 +27739,7 @@ "path": "macros\\materializations\\seeds\\seed.sql", "original_file_path": "macros\\materializations\\seeds\\seed.sql", "name": "materialization_seed_default", - "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = 
this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -28060,7 +28060,7 @@ "path": "macros\\materializations\\snapshots\\snapshot.sql", "original_file_path": "macros\\materializations\\snapshots\\snapshot.sql", "name": "materialization_snapshot_default", - "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n 
identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparison later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = 
strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -33065,7 +33065,7 @@ { "name": "custom_schema_name", "type": "string", - "description": "Default dbt argument - the configed schema of the model" + "description": "Default dbt argument - the configured schema of the model" }, { "name": "node", @@ -33542,7 +33542,7 @@ "path": "macros\\sql\\datetime\\get_time_dimension.sql", "original_file_path": "macros\\sql\\datetime\\get_time_dimension.sql", "name": "default__get_time_dimension", - "macro_sql": "{% macro default__get_time_dimension(level, kwargs) %}\r\n\r\nwith base_times as (\r\n{%- if target.type in ['sqlserver'] and level == 'second' %}\r\n\r\n {%- set base_table = kwargs.get('base_table') -%}\r\n {#-- get_base_times producs WITH statement but SQL Server wont accept WITH inside WITH #}\r\n select dateadd(second, value, '{{ base }}') as time_value\r\n from {{ base_table }}\r\n\r\n{%- else %}\r\n\r\n {{ dbt_resto.get_base_times(level) }}\r\n\r\n{%- endif %}\r\n)\r\n\r\nselect time_value,--string of HH:MM:SS\r\n concat(\r\n {{ dbt_resto.get_time_key('time_value', ['hour','minute','second'], False) }},\r\n ' ',\r\n case\r\n when cast({{ 
dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) between 12 and 23\r\n then 'PM'\r\n else 'AM'\r\n end\r\n ) as time_string, --string of HH:MM:SS AM/PM\r\n {{ dbt_resto.get_time_key('time_value', ['hour','minute','second']) }} as time24_string,--string of HH:MM:SS\r\n\r\n case\r\n when cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) > 12\r\n then cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) - 12\r\n else cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }})\r\n end as hour_number,\r\n concat(\r\n {{ dbt_resto.get_time_key('time_value', ['hour'], False) }},\r\n ' ',\r\n case\r\n when cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) between 12 and 23\r\n then 'PM'\r\n else 'AM'\r\n end\r\n ) as hour_name, --string of HH:00:00 AM/PM\r\n cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) as hour24_number,\r\n {{ dbt_resto.get_time_key('time_value', ['hour']) }} as hour24_name, --string of HH:00:00\r\n\r\n {%- if level in ['minute', 'second'] %}\r\n\r\n cast({{ dbt_resto.datepart('time_value', 'minute') }} as {{ dbt_resto.type_int() }}) as minute_number,\r\n concat(\r\n {{ dbt_resto.get_time_key('time_value', ['hour','minute'], False) }},\r\n ' ',\r\n case\r\n when cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) between 12 and 23\r\n then 'PM'\r\n else 'AM'\r\n end\r\n ) as hour_minute_name, --string of HH:MM:00 AM/PM\r\n {{ dbt_resto.get_time_key('time_value', ['hour','minute']) }} as hour24_minute_name, --string of HH:MM:00\r\n\r\n {%- endif %}\r\n\r\n {%- if level in ['second'] %}\r\n\r\n cast({{ dbt_resto.datepart('time_value', 'second') }} as {{ dbt_resto.type_int() }}) as second_number,\r\n\r\n {%- endif %}\r\n\r\n cast({{ dbt_resto.get_time_key('time_value') }} as {{ dbt_resto.type_bigint() }}) as time_key --number of HHMMSS\r\n\r\nfrom base_times\r\n\r\n{% endmacro %}",
+      "macro_sql": "{% macro default__get_time_dimension(level, kwargs) %}\r\n\r\nwith base_times as (\r\n{%- if target.type in ['sqlserver'] and level == 'second' %}\r\n\r\n {%- set base_table = kwargs.get('base_table') -%}\r\n {#-- get_base_times produces WITH statement but SQL Server won't accept WITH inside WITH #}\r\n select dateadd(second, value, '{{ base }}') as time_value\r\n from {{ base_table }}\r\n\r\n{%- else %}\r\n\r\n {{ dbt_resto.get_base_times(level) }}\r\n\r\n{%- endif %}\r\n)\r\n\r\nselect time_value,--string of HH:MM:SS\r\n concat(\r\n {{ dbt_resto.get_time_key('time_value', ['hour','minute','second'], False) }},\r\n ' ',\r\n case\r\n when cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) between 12 and 23\r\n then 'PM'\r\n else 'AM'\r\n end\r\n ) as time_string, --string of HH:MM:SS AM/PM\r\n {{ dbt_resto.get_time_key('time_value', ['hour','minute','second']) }} as time24_string,--string of HH:MM:SS\r\n\r\n case\r\n when cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) > 12\r\n then cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) - 12\r\n else cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }})\r\n end as hour_number,\r\n concat(\r\n {{ dbt_resto.get_time_key('time_value', ['hour'], False) }},\r\n ' ',\r\n case\r\n when cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) between 12 and 23\r\n then 'PM'\r\n else 'AM'\r\n 
end\r\n ) as hour_name, --string of HH:00:00 AM/PM\r\n cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) as hour24_number,\r\n {{ dbt_resto.get_time_key('time_value', ['hour']) }} as hour24_name, --string of HH:00:00\r\n\r\n {%- if level in ['minute', 'second'] %}\r\n\r\n cast({{ dbt_resto.datepart('time_value', 'minute') }} as {{ dbt_resto.type_int() }}) as minute_number,\r\n concat(\r\n {{ dbt_resto.get_time_key('time_value', ['hour','minute'], False) }},\r\n ' ',\r\n case\r\n when cast({{ dbt_resto.datepart('time_value', 'hour') }} as {{ dbt_resto.type_int() }}) between 12 and 23\r\n then 'PM'\r\n else 'AM'\r\n end\r\n ) as hour_minute_name, --string of HH:MM:00 AM/PM\r\n {{ dbt_resto.get_time_key('time_value', ['hour','minute']) }} as hour24_minute_name, --string of HH:MM:00\r\n\r\n {%- endif %}\r\n\r\n {%- if level in ['second'] %}\r\n\r\n cast({{ dbt_resto.datepart('time_value', 'second') }} as {{ dbt_resto.type_int() }}) as second_number,\r\n\r\n {%- endif %}\r\n\r\n cast({{ dbt_resto.get_time_key('time_value') }} as {{ dbt_resto.type_bigint() }}) as time_key --number of HHMMSS\r\n\r\nfrom base_times\r\n\r\n{% endmacro %}",
       "resource_type": "macro",
       "tags": [],
       "depends_on": {
@@ -37418,7 +37418,7 @@
       "path": "macros\\cross_db_utils\\width_bucket.sql",
       "original_file_path": "macros\\cross_db_utils\\width_bucket.sql",
       "name": "default__width_bucket",
-      "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n  {% set bin_size -%}\n    (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n  {%- endset %}\n  (\n    -- to break ties when the amount is eaxtly at the bucket egde\n    case\n      when\n        mod(\n          {{ dbt_utils.safe_cast(expr, dbt_utils.type_numeric() ) }},\n          {{ dbt_utils.safe_cast(bin_size, dbt_utils.type_numeric() ) }}\n        ) = 0\n          then 1\n      else 0\n    end\n  ) +\n  -- Anything over max_value goes the N+1 bucket\n  least(\n    ceil(\n      ({{ expr }} - {{ min_value }})/{{ bin_size }}\n    ),\n    {{ num_buckets }} + 1\n  )\n{%- endmacro %}",
+      "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n  {% set bin_size -%}\n    (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n  {%- endset %}\n  (\n    -- to break ties when the amount is exactly at the bucket edge\n    case\n      when\n        mod(\n          {{ dbt_utils.safe_cast(expr, dbt_utils.type_numeric() ) }},\n          {{ dbt_utils.safe_cast(bin_size, dbt_utils.type_numeric() ) }}\n        ) = 0\n          then 1\n      else 0\n    end\n  ) +\n  -- Anything over max_value goes to the N+1 bucket\n  least(\n 
ceil(\n      ({{ expr }} - {{ min_value }})/{{ bin_size }}\n    ),\n    {{ num_buckets }} + 1\n  )\n{%- endmacro %}",
       "resource_type": "macro",
       "tags": [],
       "depends_on": {
"test.dbt_resto.unique_staging_power655_box_box_id.3e89c32e6d", "test.dbt_resto.unique_staging_power655_box_box_name.6e48563e9d", @@ -41613,7 +41613,7 @@ "model.dbt_resto.staging_power655_box_detail": [ "model.dbt_resto.dim_prize", "model.dbt_resto.fact_result", - "test.dbt_resto.accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize.8d59a2292d", + "test.dbt_resto.accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize.8d59a2292d", "test.dbt_resto.dbt_utils_expression_is_true_staging_power655_box_detail_prize_value___0.8011cd1a8c", "test.dbt_resto.dbt_utils_expression_is_true_staging_power655_box_detail_prize_won___0.8934b0ce2b", "test.dbt_resto.dbt_utils_unique_combination_of_columns_staging_power655_box_detail_box_id__prize_name.140f6e3dc8", @@ -41735,7 +41735,7 @@ "test.dbt_resto.not_null_dim_prize_prize_key.2f1a24a14a": [], "test.dbt_resto.unique_dim_prize_prize_key.89c819e56f": [], "test.dbt_resto.not_null_dim_prize_prize_name.f334b496c0": [], - "test.dbt_resto.accepted_values_dim_prize_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize.a9ef660412": [], + "test.dbt_resto.accepted_values_dim_prize_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize.a9ef660412": [], "test.dbt_resto.not_null_dim_prize_prize_order.bc63d43f80": [], "test.dbt_resto.dbt_utils_expression_is_true_dim_prize_prize_order___1.05fc15ef2d": [], "test.dbt_resto.dbt_utils_expression_is_true_dim_prize_prize_order___2.a272475871": [], @@ -41771,7 +41771,7 @@ "test.dbt_resto.not_null_fact_number_forecast_last_box_date.82f507a41f": [], "test.dbt_resto.not_null_fact_number_forecast_forecast_numbers.a45c6cfd86": [], "test.dbt_resto.not_null_fact_number_forecast_last_box_result_numbers.bb234c695a": [], - "test.dbt_resto.not_null_fact_number_forecast_forecast_1.ded058c32e": [], + "test.dbt_resto.not_null_fact_number_forecast_forecast_1.dead058c32e": [], "test.dbt_resto.not_null_fact_number_forecast_last_box_result_number_1.4df45fc79d": [], "test.dbt_resto.not_null_fact_number_forecast_forecast_2.7b02c6f0f4": [], "test.dbt_resto.not_null_fact_number_forecast_last_box_result_number_2.267bedde94": [], @@ -41810,7 +41810,7 @@ "test.dbt_resto.not_null_fact_set_number_occurrence.404244b933": [], "test.dbt_resto.dbt_utils_expression_is_true_fact_set_number_occurrence___0.9b854b58c0": [], "test.dbt_resto.unique_staging_power655_box_sk_box.3cb514c8c1": [], - "test.dbt_resto.not_null_staging_power655_box_sk_box.320d0ede5f": [], + "test.dbt_resto.not_null_staging_power655_box_sk_box.320d0edge5f": [], "test.dbt_resto.unique_staging_power655_box_box_date.492223151a": [], "test.dbt_resto.not_null_staging_power655_box_box_date.e887270dc8": [], "test.dbt_resto.unique_staging_power655_box_box_id.3e89c32e6d": [], @@ -41836,7 +41836,7 @@ "test.dbt_resto.not_null_staging_power655_box_detail_box_id.b294d9fa28": [], "test.dbt_resto.not_null_staging_power655_box_detail_prize_name_raw.33b6e09a09": [], "test.dbt_resto.not_null_staging_power655_box_detail_prize_name.d7afedd3ef": [], - "test.dbt_resto.accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2nd_Prize__3rd_Prize.8d59a2292d": [], + "test.dbt_resto.accepted_values_staging_power655_box_detail_prize_name__Jackpot_1__Jackpot_2__1st_Prize__2and_Prize__3rd_Prize.8d59a2292d": [], "test.dbt_resto.not_null_staging_power655_box_detail_prize_won.545ec818dd": [], 
"test.dbt_resto.dbt_utils_expression_is_true_staging_power655_box_detail_prize_won___0.8934b0ce2b": [], "test.dbt_resto.not_null_staging_power655_box_detail_prize_value_raw.ad9ac80702": [], diff --git a/samples/facebookad/manifest.json b/samples/facebookad/manifest.json index 7e37608..f521016 100644 --- a/samples/facebookad/manifest.json +++ b/samples/facebookad/manifest.json @@ -5884,7 +5884,7 @@ }, "config": { "enabled": true, - "alias": "dbt_utils_unique_combination_o_42caf2b48bab7db5c316ae2524dd0683", + "alias": "dbt_utils_unique_combination_o_42calf2b48bab7db5c316ae2524dd0683", "schema": "dbt_test__audit", "database": null, "tags": [], @@ -5905,14 +5905,14 @@ "dbt_utils_unique_combination_of_columns_stg_facebook_ads__basic_ad_date_day__ad_id__account_id" ], "unique_id": "test.facebook_ads_source.dbt_utils_unique_combination_of_columns_stg_facebook_ads__basic_ad_date_day__ad_id__account_id.bd909529e7", - "raw_code": "{{ dbt_utils.test_unique_combination_of_columns(**_dbt_generic_test_kwargs) }}{{ config(alias=\"dbt_utils_unique_combination_o_42caf2b48bab7db5c316ae2524dd0683\") }}", + "raw_code": "{{ dbt_utils.test_unique_combination_of_columns(**_dbt_generic_test_kwargs) }}{{ config(alias=\"dbt_utils_unique_combination_o_42calf2b48bab7db5c316ae2524dd0683\") }}", "language": "sql", "package_name": "facebook_ads_source", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/facebook_ads/dbt_facebook_ads/integration_tests/dbt_packages/facebook_ads_source", - "path": "dbt_utils_unique_combination_o_42caf2b48bab7db5c316ae2524dd0683.sql", + "path": "dbt_utils_unique_combination_o_42calf2b48bab7db5c316ae2524dd0683.sql", "original_file_path": "models/stg_facebook_ads.yml", "name": "dbt_utils_unique_combination_of_columns_stg_facebook_ads__basic_ad_date_day__ad_id__account_id", - "alias": "dbt_utils_unique_combination_o_42caf2b48bab7db5c316ae2524dd0683", + "alias": "dbt_utils_unique_combination_o_42calf2b48bab7db5c316ae2524dd0683", "checksum": { "name": "none", "checksum": "" @@ -5937,7 +5937,7 @@ "build_path": null, "deferred": false, "unrendered_config": { - "alias": "dbt_utils_unique_combination_o_42caf2b48bab7db5c316ae2524dd0683" + "alias": "dbt_utils_unique_combination_o_42calf2b48bab7db5c316ae2524dd0683" }, "created_at": 1671505151.292354, "compiled_code": "\n\n\n\n\n\nwith validation_errors as (\n\n select\n date_day, ad_id, account_id\n from \"postgres\".\"facebook_ads_integration_tests_facebook_ads_source\".\"stg_facebook_ads__basic_ad\"\n group by date_day, ad_id, account_id\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n", @@ -8409,7 +8409,7 @@ "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "name": "materialization_snapshot_default", - "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ 
run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparison later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, 
model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -9172,7 +9172,7 @@ "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", - "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", + "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set 
backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ 
run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -9320,7 +9320,7 @@ "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", - "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", + "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -9460,7 +9460,7 @@ "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", - "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", + "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequently drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": { @@ -9681,7 +9681,7 @@ "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", - "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = 
this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -10421,7 +10421,7 @@ "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", - "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% 
endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", + "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -16733,7 +16733,7 @@ "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "name": "default__width_bucket", - "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is eaxtly at the bucket egde\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}", + "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is exactly at the bucket edge\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": { diff --git a/samples/fivetranlog/manifest.json b/samples/fivetranlog/manifest.json index 9cfef11..883399b 100644 --- a/samples/fivetranlog/manifest.json +++ b/samples/fivetranlog/manifest.json @@ -1088,7 +1088,7 @@ }, "destination_id": { "name": "destination_id", - "description": "Foreign key referenencing the `destination` whose data is being transformed.", + "description": "Foreign key referencing the `destination` whose data is being transformed.", "meta": {}, "data_type": null, "quote": null, @@ -1244,7 +1244,7 @@ "fivetran_log__connector_status" ], "unique_id": "model.fivetran_log.fivetran_log__connector_status", - "raw_code": "with transformation_removal as (\n\n select *\n from {{ ref('stg_fivetran_log__log') }}\n where transformation_id is null\n\n),\n\nconnector_log as (\n
select \n *,\n sum( case when event_subtype in ('sync_start') then 1 else 0 end) over ( partition by connector_id \n order by created_at rows unbounded preceding) as sync_batch_id\n from transformation_removal\n -- only looking at errors, warnings, and syncs here\n where event_type = 'SEVERE'\n or event_type = 'WARNING'\n or event_subtype like 'sync%'\n or (event_subtype = 'status' \n and {{ fivetran_utils.json_parse(string=\"message_data\", string_path=[\"status\"]) }} = 'RESCHEDULED'\n \n and {{ fivetran_utils.json_parse(string=\"message_data\", string_path=[\"reason\"]) }} like '%intended behavior%'\n ) -- for priority-first syncs. these should be captured by event_type = 'WARNING' but let's make sure\n or (event_subtype = 'status' \n and {{ fivetran_utils.json_parse(string=\"message_data\", string_path=[\"status\"]) }} = 'SUCCESSFUL'\n )\n -- whole reason is \"We have rescheduled the connector to force flush data from the forward sync into your destination. This is intended behavior and means that the connector is working as expected.\"\n),\n\nschema_changes as (\n\n select\n connector_id,\n count(*) as number_of_schema_changes_last_month\n\n from {{ ref('stg_fivetran_log__log') }}\n\n where \n {{ dbt.datediff('created_at', dbt.current_timestamp_backcompat(), 'day') }} <= 30\n and event_subtype in ('create_table', 'alter_table', 'create_schema', 'change_schema_config')\n\n group by 1\n\n),\n\nconnector as (\n\n select *\n from {{ ref('stg_fivetran_log__connector') }}\n\n),\n\ndestination as (\n\n select * \n from {{ ref('stg_fivetran_log__destination') }}\n),\n\nconnector_metrics as (\n\n select\n connector.connector_id,\n connector.connector_name,\n connector.connector_type,\n connector.destination_id,\n connector.is_paused,\n connector.set_up_at,\n max(case when connector_log.event_subtype = 'sync_start' then connector_log.created_at else null end) as last_sync_started_at,\n\n max(case when connector_log.event_subtype = 'sync_end' \n then connector_log.created_at else null end) as last_sync_completed_at,\n\n max(case when connector_log.event_subtype in ('status', 'sync_end')\n and {{ fivetran_utils.json_parse(string=\"connector_log.message_data\", string_path=[\"status\"]) }} ='SUCCESSFUL'\n then connector_log.created_at else null end) as last_successful_sync_completed_at,\n\n\n max(case when connector_log.event_subtype = 'sync_end' \n then connector_log.sync_batch_id else null end) as last_sync_batch_id,\n\n max(case when connector_log.event_subtype in ('status', 'sync_end')\n and {{ fivetran_utils.json_parse(string=\"connector_log.message_data\", string_path=[\"status\"]) }} ='RESCHEDULED'\n and {{ fivetran_utils.json_parse(string=\"connector_log.message_data\", string_path=[\"reason\"]) }} like '%intended behavior%'\n then connector_log.created_at else null end) as last_priority_first_sync_completed_at,\n \n\n max(case when connector_log.event_type = 'SEVERE' then connector_log.created_at else null end) as last_error_at,\n\n max(case when connector_log.event_type = 'SEVERE' then connector_log.sync_batch_id else null end) as last_error_batch,\n max(case when event_type = 'WARNING' then connector_log.created_at else null end) as last_warning_at\n\n from connector \n left join connector_log \n on connector_log.connector_id = connector.connector_id\n {{ dbt_utils.group_by(n=6) }}\n\n),\n\nconnector_health as (\n\n select\n *,\n case \n -- connector is paused\n when is_paused then 'paused'\n\n -- a sync has never been attempted\n when last_sync_started_at is null then 'incomplete'\n\n 
-- a priority-first sync has occurred, but a normal sync has not\n when last_priority_first_sync_completed_at is not null and last_sync_completed_at is null then 'priority first sync'\n\n -- a priority sync has occurred more recently than a normal one (may occurr if the connector has been paused and resumed)\n when last_priority_first_sync_completed_at > last_sync_completed_at then 'priority first sync'\n\n -- a sync has been attempted, but not completed, and it's not due to errors. also a priority-first sync hasn't\n when last_sync_completed_at is null and last_error_at is null then 'initial sync in progress'\n\n -- the last attempted sync had an error\n when last_sync_batch_id = last_error_batch then 'broken'\n\n -- there's never been a successful sync and there have been errors\n when last_sync_completed_at is null and last_error_at is not null then 'broken'\n\n else 'connected' end as connector_health\n\n from connector_metrics\n),\n\n-- Joining with log to grab pertinent error/warning messagees\nconnector_recent_logs as (\n\n select \n connector_health.connector_id,\n connector_health.connector_name,\n connector_health.connector_type,\n connector_health.destination_id,\n connector_health.connector_health,\n connector_health.last_successful_sync_completed_at,\n connector_health.last_sync_started_at,\n connector_health.last_sync_completed_at,\n connector_health.set_up_at,\n connector_log.event_subtype,\n connector_log.event_type,\n connector_log.message_data\n\n from connector_health \n left join connector_log \n on connector_log.connector_id = connector_health.connector_id\n -- limiting relevance to since the last successful sync completion (if there has been one)\n and connector_log.created_at > coalesce(connector_health.last_sync_completed_at, connector_health.last_priority_first_sync_completed_at, '2000-01-01') \n -- only looking at errors and warnings (excluding syncs - both normal and priority first)\n and connector_log.event_type != 'INFO' \n -- need to explicitly avoid priority first statuses because they are of event_type WARNING\n and not (connector_log.event_subtype = 'status' \n and {{ fivetran_utils.json_parse(string=\"connector_log.message_data\", string_path=[\"status\"]) }} ='RESCHEDULED'\n and {{ fivetran_utils.json_parse(string=\"connector_log.message_data\", string_path=[\"reason\"]) }} like '%intended behavior%')\n\n {{ dbt_utils.group_by(n=12) }} -- de-duping error messages\n \n\n),\n\nfinal as (\n\n select\n connector_recent_logs.connector_id,\n connector_recent_logs.connector_name,\n connector_recent_logs.connector_type,\n connector_recent_logs.destination_id,\n destination.destination_name,\n connector_recent_logs.connector_health,\n connector_recent_logs.last_successful_sync_completed_at,\n connector_recent_logs.last_sync_started_at,\n connector_recent_logs.last_sync_completed_at,\n connector_recent_logs.set_up_at,\n coalesce(schema_changes.number_of_schema_changes_last_month, 0) as number_of_schema_changes_last_month\n \n {% if var('fivetran_log_using_sync_alert_messages', true) %}\n , {{ fivetran_utils.string_agg(\"distinct case when connector_recent_logs.event_type = 'SEVERE' then connector_recent_logs.message_data else null end\", \"'\\\\n'\") }} as errors_since_last_completed_sync\n , {{ fivetran_utils.string_agg(\"distinct case when connector_recent_logs.event_type = 'WARNING' then connector_recent_logs.message_data else null end\", \"'\\\\n'\") }} as warnings_since_last_completed_sync\n {% endif %}\n\n from connector_recent_logs\n left join schema_changes 
\n on connector_recent_logs.connector_id = schema_changes.connector_id \n\n join destination on destination.destination_id = connector_recent_logs.destination_id\n {{ dbt_utils.group_by(n=11) }}\n)\n\nselect * from final", + "raw_code": "with transformation_removal as (\n\n select *\n from {{ ref('stg_fivetran_log__log') }}\n where transformation_id is null\n\n),\n\nconnector_log as (\n select \n *,\n sum( case when event_subtype in ('sync_start') then 1 else 0 end) over ( partition by connector_id \n order by created_at rows unbounded preceding) as sync_batch_id\n from transformation_removal\n -- only looking at errors, warnings, and syncs here\n where event_type = 'SEVERE'\n or event_type = 'WARNING'\n or event_subtype like 'sync%'\n or (event_subtype = 'status' \n and {{ fivetran_utils.json_parse(string=\"message_data\", string_path=[\"status\"]) }} = 'RESCHEDULED'\n \n and {{ fivetran_utils.json_parse(string=\"message_data\", string_path=[\"reason\"]) }} like '%intended behavior%'\n ) -- for priority-first syncs. these should be captured by event_type = 'WARNING' but let's make sure\n or (event_subtype = 'status' \n and {{ fivetran_utils.json_parse(string=\"message_data\", string_path=[\"status\"]) }} = 'SUCCESSFUL'\n )\n -- whole reason is \"We have rescheduled the connector to force flush data from the forward sync into your destination. This is intended behavior and means that the connector is working as expected.\"\n),\n\nschema_changes as (\n\n select\n connector_id,\n count(*) as number_of_schema_changes_last_month\n\n from {{ ref('stg_fivetran_log__log') }}\n\n where \n {{ dbt.datediff('created_at', dbt.current_timestamp_backcompat(), 'day') }} <= 30\n and event_subtype in ('create_table', 'alter_table', 'create_schema', 'change_schema_config')\n\n group by 1\n\n),\n\nconnector as (\n\n select *\n from {{ ref('stg_fivetran_log__connector') }}\n\n),\n\ndestination as (\n\n select * \n from {{ ref('stg_fivetran_log__destination') }}\n),\n\nconnector_metrics as (\n\n select\n connector.connector_id,\n connector.connector_name,\n connector.connector_type,\n connector.destination_id,\n connector.is_paused,\n connector.set_up_at,\n max(case when connector_log.event_subtype = 'sync_start' then connector_log.created_at else null end) as last_sync_started_at,\n\n max(case when connector_log.event_subtype = 'sync_end' \n then connector_log.created_at else null end) as last_sync_completed_at,\n\n max(case when connector_log.event_subtype in ('status', 'sync_end')\n and {{ fivetran_utils.json_parse(string=\"connector_log.message_data\", string_path=[\"status\"]) }} ='SUCCESSFUL'\n then connector_log.created_at else null end) as last_successful_sync_completed_at,\n\n\n max(case when connector_log.event_subtype = 'sync_end' \n then connector_log.sync_batch_id else null end) as last_sync_batch_id,\n\n max(case when connector_log.event_subtype in ('status', 'sync_end')\n and {{ fivetran_utils.json_parse(string=\"connector_log.message_data\", string_path=[\"status\"]) }} ='RESCHEDULED'\n and {{ fivetran_utils.json_parse(string=\"connector_log.message_data\", string_path=[\"reason\"]) }} like '%intended behavior%'\n then connector_log.created_at else null end) as last_priority_first_sync_completed_at,\n \n\n max(case when connector_log.event_type = 'SEVERE' then connector_log.created_at else null end) as last_error_at,\n\n max(case when connector_log.event_type = 'SEVERE' then connector_log.sync_batch_id else null end) as last_error_batch,\n max(case when event_type = 'WARNING' then 
connector_log.created_at else null end) as last_warning_at\n\n from connector \n left join connector_log \n on connector_log.connector_id = connector.connector_id\n {{ dbt_utils.group_by(n=6) }}\n\n),\n\nconnector_health as (\n\n select\n *,\n case \n -- connector is paused\n when is_paused then 'paused'\n\n -- a sync has never been attempted\n when last_sync_started_at is null then 'incomplete'\n\n -- a priority-first sync has occurred, but a normal sync has not\n when last_priority_first_sync_completed_at is not null and last_sync_completed_at is null then 'priority first sync'\n\n -- a priority sync has occurred more recently than a normal one (may occur if the connector has been paused and resumed)\n when last_priority_first_sync_completed_at > last_sync_completed_at then 'priority first sync'\n\n -- a sync has been attempted, but not completed, and it's not due to errors. also a priority-first sync hasn't\n when last_sync_completed_at is null and last_error_at is null then 'initial sync in progress'\n\n -- the last attempted sync had an error\n when last_sync_batch_id = last_error_batch then 'broken'\n\n -- there's never been a successful sync and there have been errors\n when last_sync_completed_at is null and last_error_at is not null then 'broken'\n\n else 'connected' end as connector_health\n\n from connector_metrics\n),\n\n-- Joining with log to grab pertinent error/warning messages\nconnector_recent_logs as (\n\n select \n connector_health.connector_id,\n connector_health.connector_name,\n connector_health.connector_type,\n connector_health.destination_id,\n connector_health.connector_health,\n connector_health.last_successful_sync_completed_at,\n connector_health.last_sync_started_at,\n connector_health.last_sync_completed_at,\n connector_health.set_up_at,\n connector_log.event_subtype,\n connector_log.event_type,\n connector_log.message_data\n\n from connector_health \n left join connector_log \n on connector_log.connector_id = connector_health.connector_id\n -- limiting relevance to since the last successful sync completion (if there has been one)\n and connector_log.created_at > coalesce(connector_health.last_sync_completed_at, connector_health.last_priority_first_sync_completed_at, '2000-01-01') \n -- only looking at errors and warnings (excluding syncs - both normal and priority first)\n and connector_log.event_type != 'INFO' \n -- need to explicitly avoid priority first statuses because they are of event_type WARNING\n and not (connector_log.event_subtype = 'status' \n and {{ fivetran_utils.json_parse(string=\"connector_log.message_data\", string_path=[\"status\"]) }} ='RESCHEDULED'\n and {{ fivetran_utils.json_parse(string=\"connector_log.message_data\", string_path=[\"reason\"]) }} like '%intended behavior%')\n\n {{ dbt_utils.group_by(n=12) }} -- de-duping error messages\n \n\n),\n\nfinal as (\n\n select\n connector_recent_logs.connector_id,\n connector_recent_logs.connector_name,\n connector_recent_logs.connector_type,\n connector_recent_logs.destination_id,\n destination.destination_name,\n connector_recent_logs.connector_health,\n connector_recent_logs.last_successful_sync_completed_at,\n connector_recent_logs.last_sync_started_at,\n connector_recent_logs.last_sync_completed_at,\n connector_recent_logs.set_up_at,\n coalesce(schema_changes.number_of_schema_changes_last_month, 0) as number_of_schema_changes_last_month\n \n {% if var('fivetran_log_using_sync_alert_messages', true) %}\n , {{ fivetran_utils.string_agg(\"distinct case when connector_recent_logs.event_type 
= 'SEVERE' then connector_recent_logs.message_data else null end\", \"'\\\\n'\") }} as errors_since_last_completed_sync\n , {{ fivetran_utils.string_agg(\"distinct case when connector_recent_logs.event_type = 'WARNING' then connector_recent_logs.message_data else null end\", \"'\\\\n'\") }} as warnings_since_last_completed_sync\n {% endif %}\n\n from connector_recent_logs\n left join schema_changes \n on connector_recent_logs.connector_id = schema_changes.connector_id \n\n join destination on destination.destination_id = connector_recent_logs.destination_id\n {{ dbt_utils.group_by(n=11) }}\n)\n\nselect * from final", "language": "sql", "package_name": "fivetran_log", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/fivetran_log/dbt_fivetran_log/integration_tests/dbt_packages/fivetran_log", @@ -1394,7 +1394,7 @@ "schema": "fivetran_log" }, "created_at": 1676567893.353823, - "compiled_code": "with transformation_removal as (\n\n select *\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__log\"\n where transformation_id is null\n\n),\n\nconnector_log as (\n select \n *,\n sum( case when event_subtype in ('sync_start') then 1 else 0 end) over ( partition by connector_id \n order by created_at rows unbounded preceding) as sync_batch_id\n from transformation_removal\n -- only looking at errors, warnings, and syncs here\n where event_type = 'SEVERE'\n or event_type = 'WARNING'\n or event_subtype like 'sync%'\n or (event_subtype = 'status' \n and \n\n message_data::json #>> '{status}'\n\n = 'RESCHEDULED'\n \n and \n\n message_data::json #>> '{reason}'\n\n like '%intended behavior%'\n ) -- for priority-first syncs. these should be captured by event_type = 'WARNING' but let's make sure\n or (event_subtype = 'status' \n and \n\n message_data::json #>> '{status}'\n\n = 'SUCCESSFUL'\n )\n -- whole reason is \"We have rescheduled the connector to force flush data from the forward sync into your destination. 
This is intended behavior and means that the connector is working as expected.\"\n),\n\nschema_changes as (\n\n select\n connector_id,\n count(*) as number_of_schema_changes_last_month\n\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__log\"\n\n where \n \n ((\n current_timestamp::TIMESTAMP\n)::date - (created_at)::date)\n <= 30\n and event_subtype in ('create_table', 'alter_table', 'create_schema', 'change_schema_config')\n\n group by 1\n\n),\n\nconnector as (\n\n select *\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__connector\"\n\n),\n\ndestination as (\n\n select * \n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__destination\"\n),\n\nconnector_metrics as (\n\n select\n connector.connector_id,\n connector.connector_name,\n connector.connector_type,\n connector.destination_id,\n connector.is_paused,\n connector.set_up_at,\n max(case when connector_log.event_subtype = 'sync_start' then connector_log.created_at else null end) as last_sync_started_at,\n\n max(case when connector_log.event_subtype = 'sync_end' \n then connector_log.created_at else null end) as last_sync_completed_at,\n\n max(case when connector_log.event_subtype in ('status', 'sync_end')\n and \n\n connector_log.message_data::json #>> '{status}'\n\n ='SUCCESSFUL'\n then connector_log.created_at else null end) as last_successful_sync_completed_at,\n\n\n max(case when connector_log.event_subtype = 'sync_end' \n then connector_log.sync_batch_id else null end) as last_sync_batch_id,\n\n max(case when connector_log.event_subtype in ('status', 'sync_end')\n and \n\n connector_log.message_data::json #>> '{status}'\n\n ='RESCHEDULED'\n and \n\n connector_log.message_data::json #>> '{reason}'\n\n like '%intended behavior%'\n then connector_log.created_at else null end) as last_priority_first_sync_completed_at,\n \n\n max(case when connector_log.event_type = 'SEVERE' then connector_log.created_at else null end) as last_error_at,\n\n max(case when connector_log.event_type = 'SEVERE' then connector_log.sync_batch_id else null end) as last_error_batch,\n max(case when event_type = 'WARNING' then connector_log.created_at else null end) as last_warning_at\n\n from connector \n left join connector_log \n on connector_log.connector_id = connector.connector_id\n group by 1,2,3,4,5,6\n\n),\n\nconnector_health as (\n\n select\n *,\n case \n -- connector is paused\n when is_paused then 'paused'\n\n -- a sync has never been attempted\n when last_sync_started_at is null then 'incomplete'\n\n -- a priority-first sync has occurred, but a normal sync has not\n when last_priority_first_sync_completed_at is not null and last_sync_completed_at is null then 'priority first sync'\n\n -- a priority sync has occurred more recently than a normal one (may occurr if the connector has been paused and resumed)\n when last_priority_first_sync_completed_at > last_sync_completed_at then 'priority first sync'\n\n -- a sync has been attempted, but not completed, and it's not due to errors. 
also a priority-first sync hasn't\n when last_sync_completed_at is null and last_error_at is null then 'initial sync in progress'\n\n -- the last attempted sync had an error\n when last_sync_batch_id = last_error_batch then 'broken'\n\n -- there's never been a successful sync and there have been errors\n when last_sync_completed_at is null and last_error_at is not null then 'broken'\n\n else 'connected' end as connector_health\n\n from connector_metrics\n),\n\n-- Joining with log to grab pertinent error/warning messagees\nconnector_recent_logs as (\n\n select \n connector_health.connector_id,\n connector_health.connector_name,\n connector_health.connector_type,\n connector_health.destination_id,\n connector_health.connector_health,\n connector_health.last_successful_sync_completed_at,\n connector_health.last_sync_started_at,\n connector_health.last_sync_completed_at,\n connector_health.set_up_at,\n connector_log.event_subtype,\n connector_log.event_type,\n connector_log.message_data\n\n from connector_health \n left join connector_log \n on connector_log.connector_id = connector_health.connector_id\n -- limiting relevance to since the last successful sync completion (if there has been one)\n and connector_log.created_at > coalesce(connector_health.last_sync_completed_at, connector_health.last_priority_first_sync_completed_at, '2000-01-01') \n -- only looking at errors and warnings (excluding syncs - both normal and priority first)\n and connector_log.event_type != 'INFO' \n -- need to explicitly avoid priority first statuses because they are of event_type WARNING\n and not (connector_log.event_subtype = 'status' \n and \n\n connector_log.message_data::json #>> '{status}'\n\n ='RESCHEDULED'\n and \n\n connector_log.message_data::json #>> '{reason}'\n\n like '%intended behavior%')\n\n group by 1,2,3,4,5,6,7,8,9,10,11,12 -- de-duping error messages\n \n\n),\n\nfinal as (\n\n select\n connector_recent_logs.connector_id,\n connector_recent_logs.connector_name,\n connector_recent_logs.connector_type,\n connector_recent_logs.destination_id,\n destination.destination_name,\n connector_recent_logs.connector_health,\n connector_recent_logs.last_successful_sync_completed_at,\n connector_recent_logs.last_sync_started_at,\n connector_recent_logs.last_sync_completed_at,\n connector_recent_logs.set_up_at,\n coalesce(schema_changes.number_of_schema_changes_last_month, 0) as number_of_schema_changes_last_month\n \n \n , \n string_agg(distinct case when connector_recent_logs.event_type = 'SEVERE' then connector_recent_logs.message_data else null end, '\\n')\n\n as errors_since_last_completed_sync\n , \n string_agg(distinct case when connector_recent_logs.event_type = 'WARNING' then connector_recent_logs.message_data else null end, '\\n')\n\n as warnings_since_last_completed_sync\n \n\n from connector_recent_logs\n left join schema_changes \n on connector_recent_logs.connector_id = schema_changes.connector_id \n\n join destination on destination.destination_id = connector_recent_logs.destination_id\n group by 1,2,3,4,5,6,7,8,9,10,11\n)\n\nselect * from final", + "compiled_code": "with transformation_removal as (\n\n select *\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__log\"\n where transformation_id is null\n\n),\n\nconnector_log as (\n select \n *,\n sum( case when event_subtype in ('sync_start') then 1 else 0 end) over ( partition by connector_id \n order by created_at rows unbounded preceding) as sync_batch_id\n from transformation_removal\n -- only looking at 
errors, warnings, and syncs here\n where event_type = 'SEVERE'\n or event_type = 'WARNING'\n or event_subtype like 'sync%'\n or (event_subtype = 'status' \n and \n\n message_data::json #>> '{status}'\n\n = 'RESCHEDULED'\n \n and \n\n message_data::json #>> '{reason}'\n\n like '%intended behavior%'\n ) -- for priority-first syncs. these should be captured by event_type = 'WARNING' but let's make sure\n or (event_subtype = 'status' \n and \n\n message_data::json #>> '{status}'\n\n = 'SUCCESSFUL'\n )\n -- whole reason is \"We have rescheduled the connector to force flush data from the forward sync into your destination. This is intended behavior and means that the connector is working as expected.\"\n),\n\nschema_changes as (\n\n select\n connector_id,\n count(*) as number_of_schema_changes_last_month\n\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__log\"\n\n where \n \n ((\n current_timestamp::TIMESTAMP\n)::date - (created_at)::date)\n <= 30\n and event_subtype in ('create_table', 'alter_table', 'create_schema', 'change_schema_config')\n\n group by 1\n\n),\n\nconnector as (\n\n select *\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__connector\"\n\n),\n\ndestination as (\n\n select * \n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__destination\"\n),\n\nconnector_metrics as (\n\n select\n connector.connector_id,\n connector.connector_name,\n connector.connector_type,\n connector.destination_id,\n connector.is_paused,\n connector.set_up_at,\n max(case when connector_log.event_subtype = 'sync_start' then connector_log.created_at else null end) as last_sync_started_at,\n\n max(case when connector_log.event_subtype = 'sync_end' \n then connector_log.created_at else null end) as last_sync_completed_at,\n\n max(case when connector_log.event_subtype in ('status', 'sync_end')\n and \n\n connector_log.message_data::json #>> '{status}'\n\n ='SUCCESSFUL'\n then connector_log.created_at else null end) as last_successful_sync_completed_at,\n\n\n max(case when connector_log.event_subtype = 'sync_end' \n then connector_log.sync_batch_id else null end) as last_sync_batch_id,\n\n max(case when connector_log.event_subtype in ('status', 'sync_end')\n and \n\n connector_log.message_data::json #>> '{status}'\n\n ='RESCHEDULED'\n and \n\n connector_log.message_data::json #>> '{reason}'\n\n like '%intended behavior%'\n then connector_log.created_at else null end) as last_priority_first_sync_completed_at,\n \n\n max(case when connector_log.event_type = 'SEVERE' then connector_log.created_at else null end) as last_error_at,\n\n max(case when connector_log.event_type = 'SEVERE' then connector_log.sync_batch_id else null end) as last_error_batch,\n max(case when event_type = 'WARNING' then connector_log.created_at else null end) as last_warning_at\n\n from connector \n left join connector_log \n on connector_log.connector_id = connector.connector_id\n group by 1,2,3,4,5,6\n\n),\n\nconnector_health as (\n\n select\n *,\n case \n -- connector is paused\n when is_paused then 'paused'\n\n -- a sync has never been attempted\n when last_sync_started_at is null then 'incomplete'\n\n -- a priority-first sync has occurred, but a normal sync has not\n when last_priority_first_sync_completed_at is not null and last_sync_completed_at is null then 'priority first sync'\n\n -- a priority sync has occurred more recently than a normal one (may occur if the connector has been paused and resumed)\n when 
last_priority_first_sync_completed_at > last_sync_completed_at then 'priority first sync'\n\n -- a sync has been attempted, but not completed, and it's not due to errors. also a priority-first sync hasn't\n when last_sync_completed_at is null and last_error_at is null then 'initial sync in progress'\n\n -- the last attempted sync had an error\n when last_sync_batch_id = last_error_batch then 'broken'\n\n -- there's never been a successful sync and there have been errors\n when last_sync_completed_at is null and last_error_at is not null then 'broken'\n\n else 'connected' end as connector_health\n\n from connector_metrics\n),\n\n-- Joining with log to grab pertinent error/warning messages\nconnector_recent_logs as (\n\n select \n connector_health.connector_id,\n connector_health.connector_name,\n connector_health.connector_type,\n connector_health.destination_id,\n connector_health.connector_health,\n connector_health.last_successful_sync_completed_at,\n connector_health.last_sync_started_at,\n connector_health.last_sync_completed_at,\n connector_health.set_up_at,\n connector_log.event_subtype,\n connector_log.event_type,\n connector_log.message_data\n\n from connector_health \n left join connector_log \n on connector_log.connector_id = connector_health.connector_id\n -- limiting relevance to since the last successful sync completion (if there has been one)\n and connector_log.created_at > coalesce(connector_health.last_sync_completed_at, connector_health.last_priority_first_sync_completed_at, '2000-01-01') \n -- only looking at errors and warnings (excluding syncs - both normal and priority first)\n and connector_log.event_type != 'INFO' \n -- need to explicitly avoid priority first statuses because they are of event_type WARNING\n and not (connector_log.event_subtype = 'status' \n and \n\n connector_log.message_data::json #>> '{status}'\n\n ='RESCHEDULED'\n and \n\n connector_log.message_data::json #>> '{reason}'\n\n like '%intended behavior%')\n\n group by 1,2,3,4,5,6,7,8,9,10,11,12 -- de-duping error messages\n \n\n),\n\nfinal as (\n\n select\n connector_recent_logs.connector_id,\n connector_recent_logs.connector_name,\n connector_recent_logs.connector_type,\n connector_recent_logs.destination_id,\n destination.destination_name,\n connector_recent_logs.connector_health,\n connector_recent_logs.last_successful_sync_completed_at,\n connector_recent_logs.last_sync_started_at,\n connector_recent_logs.last_sync_completed_at,\n connector_recent_logs.set_up_at,\n coalesce(schema_changes.number_of_schema_changes_last_month, 0) as number_of_schema_changes_last_month\n \n \n , \n string_agg(distinct case when connector_recent_logs.event_type = 'SEVERE' then connector_recent_logs.message_data else null end, '\\n')\n\n as errors_since_last_completed_sync\n , \n string_agg(distinct case when connector_recent_logs.event_type = 'WARNING' then connector_recent_logs.message_data else null end, '\\n')\n\n as warnings_since_last_completed_sync\n \n\n from connector_recent_logs\n left join schema_changes \n on connector_recent_logs.connector_id = schema_changes.connector_id \n\n join destination on destination.destination_id = connector_recent_logs.destination_id\n group by 1,2,3,4,5,6,7,8,9,10,11\n)\n\nselect * from final", "extra_ctes_injected": true, "extra_ctes": [], "relation_name": "\"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"fivetran_log__connector_status\"" @@ -1685,7 +1685,7 @@ }, "connector_id": { "name": "connector_id", - "description": "System-generated uniqu ID of the 
connector loading this table.", + "description": "System-generated unique ID of the connector loading this table.", "meta": {}, "data_type": null, "quote": null, @@ -1835,7 +1835,7 @@ "fivetran_log__usage_mar_destination_history" ], "unique_id": "model.fivetran_log.fivetran_log__usage_mar_destination_history", - "raw_code": "with table_mar as (\n \n select *\n from {{ ref('fivetran_log__mar_table_history') }}\n),\n\ncredits_used as (\n\n select *\n from {{ ref('stg_fivetran_log__credits_used') }}\n),\n\nuseage_cost as (\n\n select *\n from {{ ref('stg_fivetran_log__usage_cost') }}\n),\n\ndestination_mar as (\n\n select \n measured_month,\n destination_id,\n destination_name,\n sum(free_monthly_active_rows) as free_monthly_active_rows,\n sum(paid_monthly_active_rows) as paid_monthly_active_rows,\n sum(total_monthly_active_rows) as total_monthly_active_rows\n from table_mar\n group by 1,2,3\n),\n\nusage as (\n\n select \n coalesce(credits_used.destination_id, useage_cost.destination_id) as destination_id,\n credits_used.credits_spent,\n useage_cost.dollars_spent,\n cast(concat(coalesce(credits_used.measured_month,useage_cost.measured_month), '-01') as date) as measured_month -- match date format to join with MAR table\n from credits_used\n full outer join useage_cost\n on useage_cost.measured_month = credits_used.measured_month\n and useage_cost.destination_id = credits_used.destination_id\n),\n\njoin_usage_mar as (\n\n select \n destination_mar.measured_month,\n destination_mar.destination_id,\n destination_mar.destination_name,\n usage.credits_spent,\n usage.dollars_spent,\n destination_mar.free_monthly_active_rows,\n destination_mar.paid_monthly_active_rows,\n destination_mar.total_monthly_active_rows,\n\n -- credit and usage mar calculations\n round( cast(nullif(usage.credits_spent,0) * 1000000.0 as {{ dbt.type_numeric() }}) / cast(nullif(destination_mar.total_monthly_active_rows,0) as {{ dbt.type_numeric() }}), 2) as credits_spent_per_million_mar,\n round( cast(nullif(destination_mar.total_monthly_active_rows,0) * 1.0 as {{ dbt.type_numeric() }}) / cast(nullif(usage.credits_spent,0) as {{ dbt.type_numeric() }}), 0) as mar_per_credit_spent,\n round( cast(nullif(usage.dollars_spent,0) * 1000000.0 as {{ dbt.type_numeric() }}) / cast(nullif(destination_mar.total_monthly_active_rows,0) as {{ dbt.type_numeric() }}), 2) as amount_spent_per_million_mar,\n round( cast(nullif(destination_mar.total_monthly_active_rows,0) * 1.0 as {{ dbt.type_numeric() }}) / cast(nullif(usage.dollars_spent,0) as {{ dbt.type_numeric() }}), 0) as mar_per_amount_spent\n from destination_mar \n left join usage \n on destination_mar.measured_month = cast(usage.measured_month as timestamp)\n and destination_mar.destination_id = usage.destination_id\n)\n\nselect * \nfrom join_usage_mar\norder by measured_month desc", + "raw_code": "with table_mar as (\n \n select *\n from {{ ref('fivetran_log__mar_table_history') }}\n),\n\ncredits_used as (\n\n select *\n from {{ ref('stg_fivetran_log__credits_used') }}\n),\n\nuseage_cost as (\n\n select *\n from {{ ref('stg_fivetran_log__usage_cost') }}\n),\n\ndestination_mar as (\n\n select \n measured_month,\n destination_id,\n destination_name,\n sum(free_monthly_active_rows) as free_monthly_active_rows,\n sum(paid_monthly_active_rows) as paid_monthly_active_rows,\n sum(total_monthly_active_rows) as total_monthly_active_rows\n from table_mar\n group by 1,2,3\n),\n\nusage as (\n\n select \n coalesce(credits_used.destination_id, usage_cost.destination_id) as destination_id,\n 
credits_used.credits_spent,\n usage_cost.dollars_spent,\n cast(concat(coalesce(credits_used.measured_month,usage_cost.measured_month), '-01') as date) as measured_month -- match date format to join with MAR table\n from credits_used\n full outer join usage_cost\n on usage_cost.measured_month = credits_used.measured_month\n and usage_cost.destination_id = credits_used.destination_id\n),\n\njoin_usage_mar as (\n\n select \n destination_mar.measured_month,\n destination_mar.destination_id,\n destination_mar.destination_name,\n usage.credits_spent,\n usage.dollars_spent,\n destination_mar.free_monthly_active_rows,\n destination_mar.paid_monthly_active_rows,\n destination_mar.total_monthly_active_rows,\n\n -- credit and usage mar calculations\n round( cast(nullif(usage.credits_spent,0) * 1000000.0 as {{ dbt.type_numeric() }}) / cast(nullif(destination_mar.total_monthly_active_rows,0) as {{ dbt.type_numeric() }}), 2) as credits_spent_per_million_mar,\n round( cast(nullif(destination_mar.total_monthly_active_rows,0) * 1.0 as {{ dbt.type_numeric() }}) / cast(nullif(usage.credits_spent,0) as {{ dbt.type_numeric() }}), 0) as mar_per_credit_spent,\n round( cast(nullif(usage.dollars_spent,0) * 1000000.0 as {{ dbt.type_numeric() }}) / cast(nullif(destination_mar.total_monthly_active_rows,0) as {{ dbt.type_numeric() }}), 2) as amount_spent_per_million_mar,\n round( cast(nullif(destination_mar.total_monthly_active_rows,0) * 1.0 as {{ dbt.type_numeric() }}) / cast(nullif(usage.dollars_spent,0) as {{ dbt.type_numeric() }}), 0) as mar_per_amount_spent\n from destination_mar \n left join usage \n on destination_mar.measured_month = cast(usage.measured_month as timestamp)\n and destination_mar.destination_id = usage.destination_id\n)\n\nselect * \nfrom join_usage_mar\norder by measured_month desc", "language": "sql", "package_name": "fivetran_log", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/fivetran_log/dbt_fivetran_log/integration_tests/dbt_packages/fivetran_log", @@ -1974,7 +1974,7 @@ "schema": "fivetran_log" }, "created_at": 1676567893.3579001, - "compiled_code": "with table_mar as (\n \n select *\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"fivetran_log__mar_table_history\"\n),\n\ncredits_used as (\n\n select *\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__credits_used\"\n),\n\nuseage_cost as (\n\n select *\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__usage_cost\"\n),\n\ndestination_mar as (\n\n select \n measured_month,\n destination_id,\n destination_name,\n sum(free_monthly_active_rows) as free_monthly_active_rows,\n sum(paid_monthly_active_rows) as paid_monthly_active_rows,\n sum(total_monthly_active_rows) as total_monthly_active_rows\n from table_mar\n group by 1,2,3\n),\n\nusage as (\n\n select \n coalesce(credits_used.destination_id, useage_cost.destination_id) as destination_id,\n credits_used.credits_spent,\n useage_cost.dollars_spent,\n cast(concat(coalesce(credits_used.measured_month,useage_cost.measured_month), '-01') as date) as measured_month -- match date format to join with MAR table\n from credits_used\n full outer join useage_cost\n on useage_cost.measured_month = credits_used.measured_month\n and useage_cost.destination_id = credits_used.destination_id\n),\n\njoin_usage_mar as (\n\n select \n destination_mar.measured_month,\n destination_mar.destination_id,\n destination_mar.destination_name,\n usage.credits_spent,\n usage.dollars_spent,\n 
destination_mar.free_monthly_active_rows,\n destination_mar.paid_monthly_active_rows,\n destination_mar.total_monthly_active_rows,\n\n -- credit and usage mar calculations\n round( cast(nullif(usage.credits_spent,0) * 1000000.0 as numeric(28,6)) / cast(nullif(destination_mar.total_monthly_active_rows,0) as numeric(28,6)), 2) as credits_spent_per_million_mar,\n round( cast(nullif(destination_mar.total_monthly_active_rows,0) * 1.0 as numeric(28,6)) / cast(nullif(usage.credits_spent,0) as numeric(28,6)), 0) as mar_per_credit_spent,\n round( cast(nullif(usage.dollars_spent,0) * 1000000.0 as numeric(28,6)) / cast(nullif(destination_mar.total_monthly_active_rows,0) as numeric(28,6)), 2) as amount_spent_per_million_mar,\n round( cast(nullif(destination_mar.total_monthly_active_rows,0) * 1.0 as numeric(28,6)) / cast(nullif(usage.dollars_spent,0) as numeric(28,6)), 0) as mar_per_amount_spent\n from destination_mar \n left join usage \n on destination_mar.measured_month = cast(usage.measured_month as timestamp)\n and destination_mar.destination_id = usage.destination_id\n)\n\nselect * \nfrom join_usage_mar\norder by measured_month desc", + "compiled_code": "with table_mar as (\n \n select *\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"fivetran_log__mar_table_history\"\n),\n\ncredits_used as (\n\n select *\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__credits_used\"\n),\n\nusage_cost as (\n\n select *\n from \"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"stg_fivetran_log__usage_cost\"\n),\n\ndestination_mar as (\n\n select \n measured_month,\n destination_id,\n destination_name,\n sum(free_monthly_active_rows) as free_monthly_active_rows,\n sum(paid_monthly_active_rows) as paid_monthly_active_rows,\n sum(total_monthly_active_rows) as total_monthly_active_rows\n from table_mar\n group by 1,2,3\n),\n\nusage as (\n\n select \n coalesce(credits_used.destination_id, usage_cost.destination_id) as destination_id,\n credits_used.credits_spent,\n usage_cost.dollars_spent,\n cast(concat(coalesce(credits_used.measured_month,usage_cost.measured_month), '-01') as date) as measured_month -- match date format to join with MAR table\n from credits_used\n full outer join usage_cost\n on usage_cost.measured_month = credits_used.measured_month\n and usage_cost.destination_id = credits_used.destination_id\n),\n\njoin_usage_mar as (\n\n select \n destination_mar.measured_month,\n destination_mar.destination_id,\n destination_mar.destination_name,\n usage.credits_spent,\n usage.dollars_spent,\n destination_mar.free_monthly_active_rows,\n destination_mar.paid_monthly_active_rows,\n destination_mar.total_monthly_active_rows,\n\n -- credit and usage mar calculations\n round( cast(nullif(usage.credits_spent,0) * 1000000.0 as numeric(28,6)) / cast(nullif(destination_mar.total_monthly_active_rows,0) as numeric(28,6)), 2) as credits_spent_per_million_mar,\n round( cast(nullif(destination_mar.total_monthly_active_rows,0) * 1.0 as numeric(28,6)) / cast(nullif(usage.credits_spent,0) as numeric(28,6)), 0) as mar_per_credit_spent,\n round( cast(nullif(usage.dollars_spent,0) * 1000000.0 as numeric(28,6)) / cast(nullif(destination_mar.total_monthly_active_rows,0) as numeric(28,6)), 2) as amount_spent_per_million_mar,\n round( cast(nullif(destination_mar.total_monthly_active_rows,0) * 1.0 as numeric(28,6)) / cast(nullif(usage.dollars_spent,0) as numeric(28,6)), 0) as mar_per_amount_spent\n from destination_mar \n left join usage \n on 
destination_mar.measured_month = cast(usage.measured_month as timestamp)\n and destination_mar.destination_id = usage.destination_id\n)\n\nselect * \nfrom join_usage_mar\norder by measured_month desc",
 "extra_ctes_injected": true,
 "extra_ctes": [],
 "relation_name": "\"postgres\".\"fivetrans_logs_integration_tests_fivetran_log\".\"fivetran_log__usage_mar_destination_history\""
@@ -2635,7 +2635,7 @@
 },
 "destination_id": {
 "name": "destination_id",
-        "description": "Foreign key referenencing the `destination` whose data is being transformed.",
+        "description": "Foreign key referencing the `destination` whose data is being transformed.",
 "meta": {},
 "data_type": null,
 "quote": null,
@@ -8653,7 +8653,7 @@
 "path": "macros/materializations/snapshots/snapshot.sql",
 "original_file_path": "macros/materializations/snapshots/snapshot.sql",
 "name": "materialization_snapshot_default",
-    "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% 
set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparison later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do 
create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -9416,7 +9416,7 @@ "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", - "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", + "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set 
backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ 
run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -9564,7 +9564,7 @@ "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", - "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", + "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -9704,7 +9704,7 @@ "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", - "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}",
+    "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequently drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": { @@ -9925,7 +9925,7 @@ "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", - "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = 
this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -10665,7 +10665,7 @@ "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", - "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% 
endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}",
+    "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}",
 "resource_type": "macro",
 "tags": [],
 "depends_on": {
@@ -16977,7 +16977,7 @@
 "path": "macros/sql/width_bucket.sql",
 "original_file_path": "macros/sql/width_bucket.sql",
 "name": "default__width_bucket",
-    "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is eaxtly at the bucket egde\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}",
+    "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is exactly at the bucket edge\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes to the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}",
 "resource_type": "macro",
 "tags": [],
 "depends_on": {
"test.fivetran_log.dbt_utils_unique_combination_of_columns_stg_fivetran_log__credits_used_measured_month__destination_id.ad5114f878" ], "model.fivetran_log.stg_fivetran_log__destination_membership": [ - "test.fivetran_log.dbt_utils_unique_combination_of_columns_stg_fivetran_log__destination_membership_destination_id__user_id.daa65572da" + "test.fivetran_log.dbt_utils_unique_combination_of_columns_stg_fivetran_log__destination_membership_destination_id__user_id.data65572da" ], "model.fivetran_log.stg_fivetran_log__transformation": [ "model.fivetran_log.fivetran_log__transformation_status", @@ -20243,7 +20243,7 @@ "test.fivetran_log.unique_stg_fivetran_log__account_account_id.3de58e95dc": [], "test.fivetran_log.not_null_stg_fivetran_log__account_account_id.19a03e662a": [], "test.fivetran_log.dbt_utils_unique_combination_of_columns_stg_fivetran_log__account_membership_account_id__user_id.9d4b4c8ce6": [], - "test.fivetran_log.dbt_utils_unique_combination_of_columns_stg_fivetran_log__destination_membership_destination_id__user_id.daa65572da": [], + "test.fivetran_log.dbt_utils_unique_combination_of_columns_stg_fivetran_log__destination_membership_destination_id__user_id.data65572da": [], "test.fivetran_log.dbt_utils_unique_combination_of_columns_stg_fivetran_log__transformation_transformation_id__destination_id.53b3485eb3": [], "test.fivetran_log.dbt_utils_unique_combination_of_columns_stg_fivetran_log__trigger_table_trigger_table__transformation_id.124bb8d5ca": [], "test.fivetran_log.unique_stg_fivetran_log__user_user_id.25561a7dda": [], diff --git a/samples/shopify/manifest.json b/samples/shopify/manifest.json index 5e517f8..7669abb 100644 --- a/samples/shopify/manifest.json +++ b/samples/shopify/manifest.json @@ -15249,7 +15249,7 @@ }, "first_order_timestamp": { "name": "first_order_timestamp", - "description": "The timetamp of the first order against this inventory level.", + "description": "The timestamp of the first order against this inventory level.", "meta": {}, "data_type": null, "quote": null, @@ -25115,7 +25115,7 @@ }, "config": { "enabled": true, - "alias": "not_null_stg_shopify__abandone_69cca4dad786feddbb78552d60221d7c", + "alias": "not_null_stg_shopify__abandon_69cca4dad786feddbb78552d60221d7c", "schema": "dbt_test__audit", "database": null, "tags": [], @@ -25136,14 +25136,14 @@ "not_null_stg_shopify__abandoned_checkout_discount_code_checkout_id" ], "unique_id": "test.shopify_source.not_null_stg_shopify__abandoned_checkout_discount_code_checkout_id.c00e9d48a7", - "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}{{ config(alias=\"not_null_stg_shopify__abandone_69cca4dad786feddbb78552d60221d7c\") }}", + "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}{{ config(alias=\"not_null_stg_shopify__abandon_69cca4dad786feddbb78552d60221d7c\") }}", "language": "sql", "package_name": "shopify_source", "root_path": "/Users/jamie.rodriguez/Desktop/dbt_repos/Shopify/dbt_shopify/integration_tests/dbt_packages/shopify_source", - "path": "not_null_stg_shopify__abandone_69cca4dad786feddbb78552d60221d7c.sql", + "path": "not_null_stg_shopify__abandon_69cca4dad786feddbb78552d60221d7c.sql", "original_file_path": "models/stg_shopify.yml", "name": "not_null_stg_shopify__abandoned_checkout_discount_code_checkout_id", - "alias": "not_null_stg_shopify__abandone_69cca4dad786feddbb78552d60221d7c", + "alias": "not_null_stg_shopify__abandon_69cca4dad786feddbb78552d60221d7c", "checksum": { "name": "none", "checksum": "" @@ -25168,7 +25168,7 @@ "build_path": null, "deferred": false, 
"unrendered_config": { - "alias": "not_null_stg_shopify__abandone_69cca4dad786feddbb78552d60221d7c" + "alias": "not_null_stg_shopify__abandon_69cca4dad786feddbb78552d60221d7c" }, "created_at": 1675199138.487849, "compiled_code": "\n \n \n\n\n\nselect checkout_id\nfrom `dbt-package-testing`.`shopify_source_integration_tests_1_stg_shopify`.`stg_shopify__abandoned_checkout_discount_code`\nwhere checkout_id is null\n\n\n", @@ -30002,7 +30002,7 @@ "data_type": null, "quote": null, "tags": [], - "descripton": "Tax code applied to the line item. As multiple taxes can apply to a line item, we recommend referring to `stg_shopify__tax_line`." + "description": "Tax code applied to the line item. As multiple taxes can apply to a line item, we recommend referring to `stg_shopify__tax_line`." }, "total_discount_set": { "name": "total_discount_set", @@ -32026,7 +32026,7 @@ }, "rules": { "name": "rules", - "description": "An array of rules that define what products go into the smart collection. Each rule (`column` -- `relation` --> `condition`) has these properties: - `column`: the property of a product being used to populate the smart collection. Ex: 'tag', 'type', 'vendor', 'variant_price', etc. - `relation`: The comparitive relationship between the column choice, and the condition ('equals', 'contains', 'greater_than', etc.) - condition: Select products for a smart collection using a condition. Values are either strings or numbers, depending on the relation value. See the [Shopify docs](https://shopify.dev/api/admin-rest/2022-10/resources/smartcollection#resource-object) for more.\n", + "description": "An array of rules that define what products go into the smart collection. Each rule (`column` -- `relation` --> `condition`) has these properties: - `column`: the property of a product being used to populate the smart collection. Ex: 'tag', 'type', 'vendor', 'variant_price', etc. - `relation`: The comparative relationship between the column choice, and the condition ('equals', 'contains', 'greater_than', etc.) - condition: Select products for a smart collection using a condition. Values are either strings or numbers, depending on the relation value. See the [Shopify docs](https://shopify.dev/api/admin-rest/2022-10/resources/smartcollection#resource-object) for more.\n", "meta": {}, "data_type": null, "quote": null, @@ -32118,7 +32118,7 @@ }, "index": { "name": "index", - "description": "Index (starting from 1) represnting when the tag was placed on the customer.", + "description": "Index (starting from 1) representing when the tag was placed on the customer.", "meta": {}, "data_type": null, "quote": null, @@ -32366,7 +32366,7 @@ }, "shipment_status": { "name": "shipment_status", - "description": "The current shipment status of the fulfillment. Valid values include: - label_printed: A label for the shipment was purchased and printed. - label_purchased: A label for the shipment was purchased, but not printed. - attempted_delivery: Delivery of the shipment was attempted, but unable to be completed. - ready_for_pickup: The shipment is ready for pickup at a shipping depot. - confirmed: The carrier is aware of the shipment, but hasn't received it yet. - in_transit: The shipment is being transported between shipping facilities on the way to its destination. - out_for_delivery: The shipment is being delivered to its final destination. - delivered: The shipment was succesfully delivered. 
- failure: Something went wrong when pulling tracking information for the shipment, such as the tracking number was invalid or the shipment was canceled.\n", + "description": "The current shipment status of the fulfillment. Valid values include: - label_printed: A label for the shipment was purchased and printed. - label_purchased: A label for the shipment was purchased, but not printed. - attempted_delivery: Delivery of the shipment was attempted, but unable to be completed. - ready_for_pickup: The shipment is ready for pickup at a shipping depot. - confirmed: The carrier is aware of the shipment, but hasn't received it yet. - in_transit: The shipment is being transported between shipping facilities on the way to its destination. - out_for_delivery: The shipment is being delivered to its final destination. - delivered: The shipment was successfully delivered. - failure: Something went wrong when pulling tracking information for the shipment, such as the tracking number was invalid or the shipment was canceled.\n", "meta": {}, "data_type": null, "quote": null, @@ -33330,7 +33330,7 @@ }, "order_shipping_line_id": { "name": "order_shipping_line_id", - "description": "ID of the order shipping line this recod is associated with.", + "description": "ID of the order shipping line this record is associated with.", "meta": {}, "data_type": null, "quote": null, @@ -33430,7 +33430,7 @@ }, "index": { "name": "index", - "description": "Index (starting from 1) represnting when the tag was placed on the order.", + "description": "Index (starting from 1) representing when the tag was placed on the order.", "meta": {}, "data_type": null, "quote": null, @@ -33966,7 +33966,7 @@ }, "index": { "name": "index", - "description": "Index (starting from 1) represnting when the tag was placed on the product.", + "description": "Index (starting from 1) representing when the tag was placed on the product.", "meta": {}, "data_type": null, "quote": null, @@ -34362,7 +34362,7 @@ }, "pre_launch_enabled": { "name": "pre_launch_enabled", - "description": "Boolen representing whether the pre-launch page is enabled on the shop's online store.", + "description": "Boolean representing whether the pre-launch page is enabled on the shop's online store.", "meta": {}, "data_type": null, "quote": null, @@ -36079,7 +36079,7 @@ "path": "macros/materializations/view.sql", "original_file_path": "macros/materializations/view.sql", "name": "materialization_view_bigquery", - "macro_sql": "{% materialization view, adapter='bigquery' -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {% set to_return = create_or_replace_view() %}\n\n {% set target_relation = this.incorporate(type='view') %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if config.get('grant_access_to') %}\n {% for grant_target_dict in config.get('grant_access_to') %}\n {% do adapter.grant_access_to(this, 'view', None, grant_target_dict) %}\n {% endfor %}\n {% endif %}\n\n {% do return(to_return) %}\n\n{%- endmaterialization %}", + "macro_sql": "{% materialization view, adapter='bigquery' -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {% set to_return = create_or_replace_view() %}\n\n {% set target_relation = this.incorporate(type='view') %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if config.get('grant_access_to') %}\n {% for grant_target_dict in config.get('grant_access_to') %}\n {% do 
adapter.grant_access_to(this, 'view', None, grant_target_dict) %}\n {% endfor %}\n {% endif %}\n\n {% do return(to_return) %}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -36108,7 +36108,7 @@ "path": "macros/materializations/table.sql", "original_file_path": "macros/materializations/table.sql", "name": "materialization_table_bigquery", - "macro_sql": "{% materialization table, adapter='bigquery', supported_languages=['sql', 'python']-%}\n\n {%- set language = model['language'] -%}\n {%- set identifier = model['alias'] -%}\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_not_as_table = (old_relation is not none and not old_relation.is_table) -%}\n {%- set target_relation = api.Relation.create(database=database, schema=schema, identifier=identifier, type='table') -%}\n\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n {#\n We only need to drop this thing if it is not a table.\n If it _is_ already a table, then we can overwrite it without downtime\n Unlike table -> view, no need for `--full-refresh`: dropping a view is no big deal\n #}\n {%- if exists_not_as_table -%}\n {{ adapter.drop_relation(old_relation) }}\n {%- endif -%}\n\n -- build model\n {%- set raw_partition_by = config.get('partition_by', none) -%}\n {%- set partition_by = adapter.parse_partition_by(raw_partition_by) -%}\n {%- set cluster_by = config.get('cluster_by', none) -%}\n {% if not adapter.is_replaceable(old_relation, partition_by, cluster_by) %}\n {% do log(\"Hard refreshing \" ~ old_relation ~ \" because it is not replaceable\") %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n -- build model\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {{ run_hooks(post_hooks) }}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization table, adapter='bigquery', supported_languages=['sql', 'python']-%}\n\n {%- set language = model['language'] -%}\n {%- set identifier = model['alias'] -%}\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_not_as_table = (old_relation is not none and not old_relation.is_table) -%}\n {%- set target_relation = api.Relation.create(database=database, schema=schema, identifier=identifier, type='table') -%}\n\n -- grab current tables grants config for comparison later on\n {%- set grant_config = config.get('grants') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n {#\n We only need to drop this thing if it is not a table.\n If it _is_ already a table, then we can overwrite it without downtime\n Unlike table -> view, no need for `--full-refresh`: dropping a view is no big deal\n #}\n {%- if exists_not_as_table -%}\n {{ adapter.drop_relation(old_relation) }}\n {%- endif -%}\n\n -- build model\n {%- set raw_partition_by = config.get('partition_by', none) -%}\n {%- set partition_by = adapter.parse_partition_by(raw_partition_by) -%}\n {%- set cluster_by = config.get('cluster_by', none) -%}\n {% if not adapter.is_replaceable(old_relation, partition_by, cluster_by) %}\n 
{% do log(\"Hard refreshing \" ~ old_relation ~ \" because it is not replaceable\") %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n -- build model\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {{ run_hooks(post_hooks) }}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -36298,7 +36298,7 @@ "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "name": "materialization_incremental_bigquery", - "macro_sql": "{% materialization incremental, adapter='bigquery', supported_languages=['sql', 'python'] -%}\n\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n {%- set language = model['language'] %}\n\n {%- set target_relation = this %}\n {%- set existing_relation = load_relation(this) %}\n {%- set tmp_relation = make_temp_relation(this) %}\n\n {#-- Validate early so we don't run SQL if the strategy is invalid --#}\n {% set strategy = dbt_bigquery_validate_get_incremental_strategy(config) -%}\n\n {%- set raw_partition_by = config.get('partition_by', none) -%}\n {%- set partition_by = adapter.parse_partition_by(raw_partition_by) -%}\n {%- set partitions = config.get('partitions', none) -%}\n {%- set cluster_by = config.get('cluster_by', none) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n {% if existing_relation is none %}\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% elif existing_relation.is_view %}\n {#-- There's no way to atomically replace a view with a table on BQ --#}\n {{ adapter.drop_relation(existing_relation) }}\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% elif full_refresh_mode %}\n {#-- If the partition/cluster config has changed, then we must drop and recreate --#}\n {% if not adapter.is_replaceable(existing_relation, partition_by, cluster_by) %}\n {% do log(\"Hard refreshing \" ~ existing_relation ~ \" because it is not replaceable\") %}\n {{ adapter.drop_relation(existing_relation) }}\n {% endif %}\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% else %}\n {%- if language == 'python' and strategy == 'insert_overwrite' -%}\n {#-- This lets us move forward assuming no python will be directly templated into a query --#}\n {%- set python_unsupported_msg -%}\n The 'insert_overwrite' strategy is not yet supported for python models.\n {%- endset %}\n {% do exceptions.raise_compiler_error(python_unsupported_msg) %}\n {%- endif -%}\n\n {% set tmp_relation_exists = false %}\n {% if on_schema_change != 'ignore' or language == 'python' %}\n {#-- Check first, since otherwise we may not build a temp table --#}\n {#-- Python always needs to create a 
temp table --#}\n {%- call statement('create_tmp_relation', language=language) -%}\n {{ declare_dbt_max_partition(this, partition_by, compiled_code, language) +\n create_table_as(True, tmp_relation, compiled_code, language)\n }}\n {%- endcall -%}\n {% set tmp_relation_exists = true %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% endif %}\n\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = bq_generate_incremental_build_sql(\n strategy, tmp_relation, target_relation, compiled_code, unique_key, partition_by, partitions, dest_columns, tmp_relation_exists\n ) %}\n\n {%- call statement('main') -%}\n {{ build_sql }}\n {% endcall %}\n\n {%- if language == 'python' and tmp_relation -%}\n {{ adapter.drop_relation(tmp_relation) }}\n {%- endif -%}\n\n {% endif %}\n\n {{ run_hooks(post_hooks) }}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", + "macro_sql": "{% materialization incremental, adapter='bigquery', supported_languages=['sql', 'python'] -%}\n\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n {%- set language = model['language'] %}\n\n {%- set target_relation = this %}\n {%- set existing_relation = load_relation(this) %}\n {%- set tmp_relation = make_temp_relation(this) %}\n\n {#-- Validate early so we don't run SQL if the strategy is invalid --#}\n {% set strategy = dbt_bigquery_validate_get_incremental_strategy(config) -%}\n\n {%- set raw_partition_by = config.get('partition_by', none) -%}\n {%- set partition_by = adapter.parse_partition_by(raw_partition_by) -%}\n {%- set partitions = config.get('partitions', none) -%}\n {%- set cluster_by = config.get('cluster_by', none) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n {% if existing_relation is none %}\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% elif existing_relation.is_view %}\n {#-- There's no way to atomically replace a view with a table on BQ --#}\n {{ adapter.drop_relation(existing_relation) }}\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% elif full_refresh_mode %}\n {#-- If the partition/cluster config has changed, then we must drop and recreate --#}\n {% if not adapter.is_replaceable(existing_relation, partition_by, cluster_by) %}\n {% do log(\"Hard refreshing \" ~ existing_relation ~ \" because it is not replaceable\") %}\n {{ adapter.drop_relation(existing_relation) }}\n {% endif %}\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% else %}\n {%- if language == 'python' and strategy == 
'insert_overwrite' -%}\n {#-- This lets us move forward assuming no python will be directly templated into a query --#}\n {%- set python_unsupported_msg -%}\n The 'insert_overwrite' strategy is not yet supported for python models.\n {%- endset %}\n {% do exceptions.raise_compiler_error(python_unsupported_msg) %}\n {%- endif -%}\n\n {% set tmp_relation_exists = false %}\n {% if on_schema_change != 'ignore' or language == 'python' %}\n {#-- Check first, since otherwise we may not build a temp table --#}\n {#-- Python always needs to create a temp table --#}\n {%- call statement('create_tmp_relation', language=language) -%}\n {{ declare_dbt_max_partition(this, partition_by, compiled_code, language) +\n create_table_as(True, tmp_relation, compiled_code, language)\n }}\n {%- endcall -%}\n {% set tmp_relation_exists = true %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% endif %}\n\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = bq_generate_incremental_build_sql(\n strategy, tmp_relation, target_relation, compiled_code, unique_key, partition_by, partitions, dest_columns, tmp_relation_exists\n ) %}\n\n {%- call statement('main') -%}\n {{ build_sql }}\n {% endcall %}\n\n {%- if language == 'python' and tmp_relation -%}\n {{ adapter.drop_relation(tmp_relation) }}\n {%- endif -%}\n\n {% endif %}\n\n {{ run_hooks(post_hooks) }}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -37687,7 +37687,7 @@ "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "name": "materialization_snapshot_default", - "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- 
this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparison later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 
'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -38450,7 +38450,7 @@ "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", - "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", + "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set 
backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ 
run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -38598,7 +38598,7 @@ "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", - "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", + "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -38738,7 +38738,7 @@ "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", - "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n    this relation will be effectively unused.\n  */\n  {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n  {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n  -- as above, the backup_relation should not already exist\n  {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n  -- grab current tables grants config for comparision later on\n  {% set grant_config = config.get('grants') %}\n\n  {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n  -- drop the temp relations if they exist already in the database\n  {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n  {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n  -- `BEGIN` happens here:\n  {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n  -- build model\n  {% call statement('main') -%}\n    {{ get_create_view_as_sql(intermediate_relation, sql) }}\n  {%- endcall %}\n\n  -- cleanup\n  -- move the existing view out of the way\n  {% if existing_relation is not none %}\n    {{ adapter.rename_relation(existing_relation, backup_relation) }}\n  {% endif %}\n  {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n  {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n  {% do persist_docs(target_relation, model) %}\n\n  {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n  {{ adapter.commit() }}\n\n  {{ drop_relation_if_exists(backup_relation) }}\n\n  {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n  {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}",
+    "macro_sql": "{%- materialization view, default -%}\n\n  {%- set existing_relation = load_cached_relation(this) -%}\n  {%- set target_relation = this.incorporate(type='view') -%}\n  {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n  -- the intermediate_relation should not already exist in the database; get_relation\n  -- will return None in that case. Otherwise, we get a relation that we can drop\n  -- later, before we try to use this name for the current operation\n  {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n  /*\n    This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n    a previous run, and we're going to try to drop it immediately. At the end of this\n    materialization, we're going to rename the \"existing_relation\" to this identifier,\n    and then we're going to drop it. In order to make sure we run the correct one of:\n      - drop view ...\n      - drop table ...\n\n    We need to set the type of this relation to be the type of the existing_relation, if it exists,\n    or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n    exist, then there is nothing to move out of the way and subsequently drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": { @@ -38959,7 +38959,7 @@ "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", - "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = 
this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": { @@ -39699,7 +39699,7 @@ "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", - "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% 
endif %}\n\n  {% set date_list = [] %}\n  {% for i in range(0, day_count + 1) %}\n    {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n    {% if not out_fmt %}\n      {% set _ = date_list.append(the_date) %}\n    {% else %}\n      {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n    {% endif %}\n  {% endfor %}\n\n  {{ return(date_list) }}\n{% endmacro %}",
+    "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n  {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n  {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n  {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n  {% set day_count = (end_date - start_date).days %}\n  {% if day_count < 0 %}\n    {% set msg -%}\n      Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n    {%- endset %}\n\n    {{ exceptions.raise_compiler_error(msg, model) }}\n  {% endif %}\n\n  {% set date_list = [] %}\n  {% for i in range(0, day_count + 1) %}\n    {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n    {% if not out_fmt %}\n      {% set _ = date_list.append(the_date) %}\n    {% else %}\n      {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n    {% endif %}\n  {% endfor %}\n\n  {{ return(date_list) }}\n{% endmacro %}",
 "resource_type": "macro",
 "tags": [],
 "depends_on": {
@@ -46988,7 +46988,7 @@
 "path": "macros/sql/width_bucket.sql",
 "original_file_path": "macros/sql/width_bucket.sql",
 "name": "default__width_bucket",
-    "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n    {% set bin_size -%}\n    (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n    {%- endset %}\n    (\n        -- to break ties when the amount is eaxtly at the bucket egde\n        case\n            when\n                mod(\n                    {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n                    {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n                ) = 0\n                then 1\n            else 0\n        end\n    ) +\n    -- Anything over max_value goes the N+1 bucket\n    least(\n        ceil(\n            ({{ expr }} - {{ min_value }})/{{ bin_size }}\n        ),\n        {{ num_buckets }} + 1\n    )\n{%- endmacro %}",
+    "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n    {% set bin_size -%}\n    (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n    {%- endset %}\n    (\n        -- to break ties when the amount is exactly at the bucket edge\n        case\n            when\n                mod(\n                    {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n                    {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n                ) = 0\n                then 1\n            else 0\n        end\n    ) +\n    -- Anything over max_value goes the N+1 bucket\n    least(\n        ceil(\n            ({{ expr }} - {{ min_value }})/{{ bin_size }}\n        ),\n        {{ num_buckets }} + 1\n    )\n{%- endmacro %}",
 "resource_type": "macro",
 "tags": [],
 "depends_on": {
diff --git a/tests/unit/helpers/test_jsonify.py b/tests/unit/helpers/test_jsonify.py
index e81c703..e210722 100644
--- a/tests/unit/helpers/test_jsonify.py
+++ b/tests/unit/helpers/test_jsonify.py
@@ -20,7 +20,7 @@ class DummyData:
 class TestFile:
     @pytest.mark.parametrize(
-        "input, ouput",
+        "input, output",
         [
             ('{"data":"dummy"}', dict({"data": "dummy"})),
             (
@@ -29,8 +29,8 @@ class TestFile:
             ),
         ],
     )
-    def test_mask(self, input, ouput):
-        assert jsonify.mask(obj=input) == ouput
+    def test_mask(self, input, output):
+        assert jsonify.mask(obj=input) == output
 
     def test_mask_with_class(self):
         obj = Dummy(str="dummy", secret_str="this is a secret")