diff --git a/.github/workflows/ci_lint_package.yml b/.github/workflows/ci_lint_package.yml
index 9f1508f3..e2d33b7d 100644
--- a/.github/workflows/ci_lint_package.yml
+++ b/.github/workflows/ci_lint_package.yml
@@ -50,7 +50,7 @@ jobs:
           architecture: "x64"

       - name: Install Python packages
-        run: python -m pip install dbt-snowflake~=1.6.0 sqlfluff-templater-dbt~=2.3.2
+        run: python -m pip install dbt-snowflake~=1.8.0 sqlfluff-templater-dbt~=3.0.0

       - name: Test database connection
         run: dbt debug
diff --git a/.github/workflows/ci_test_package.yml b/.github/workflows/ci_test_package.yml
index b02997ee..06c07e92 100644
--- a/.github/workflows/ci_test_package.yml
+++ b/.github/workflows/ci_test_package.yml
@@ -21,7 +21,7 @@ env:
   DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
   DBT_ENV_SECRET_GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
   # Env var to test version
-  LAST_RELEASE_SUPPORTED_DBT_VERSION: 1_6_0 # A dbt version supported by both the last release and this one
+  LAST_RELEASE_SUPPORTED_DBT_VERSION: 1_8_0 # A dbt version supported by both the last release and this one
   # Env vars to test invocations model
   DBT_CLOUD_PROJECT_ID: 123
   DBT_CLOUD_JOB_ID: ABC
@@ -115,7 +115,7 @@ jobs:
       matrix:
         warehouse: ["snowflake", "bigquery", "postgres"]
         # When supporting a new version, update the list here
-        version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0"]
+        version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0", "1_8_0"]
     runs-on: ubuntu-latest
     environment:
       name: Approve Integration Tests
diff --git a/.github/workflows/main_lint_package.yml b/.github/workflows/main_lint_package.yml
index 7747f565..bd691804 100644
--- a/.github/workflows/main_lint_package.yml
+++ b/.github/workflows/main_lint_package.yml
@@ -46,7 +46,7 @@ jobs:
           architecture: "x64"

       - name: Install Python packages
-        run: python -m pip install dbt-snowflake~=1.6.0 sqlfluff-templater-dbt~=2.3.2
+        run: python -m pip install dbt-snowflake~=1.8.0 sqlfluff-templater-dbt~=3.0.0

       - name: Test database connection
         run: dbt debug
diff --git a/.github/workflows/main_test_package.yml b/.github/workflows/main_test_package.yml
index 4398c459..12a9c957 100644
--- a/.github/workflows/main_test_package.yml
+++ b/.github/workflows/main_test_package.yml
@@ -35,7 +35,7 @@ jobs:
     strategy:
       matrix:
         warehouse: ["snowflake", "bigquery", "postgres"]
-        version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0"]
+        version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0", "1_8_0"]
     runs-on: ubuntu-latest
     permissions:
       contents: "read"
diff --git a/.github/workflows/publish_docs_on_release.yml b/.github/workflows/publish_docs_on_release.yml
index d558a464..663486ff 100644
--- a/.github/workflows/publish_docs_on_release.yml
+++ b/.github/workflows/publish_docs_on_release.yml
@@ -39,7 +39,7 @@ jobs:
         uses: actions/checkout@v3

       - name: Install Python packages
-        run: python -m pip install dbt-snowflake~=1.6.0
+        run: python -m pip install dbt-snowflake~=1.8.0

       - name: Test database connection
         run: dbt debug
diff --git a/README.md b/README.md
index 17688961..93c25f40 100644
--- a/README.md
+++ b/README.md
@@ -46,7 +46,7 @@ See the generated [dbt docs site](https://brooklyn-data.github.io/dbt_artifacts/
 ```
 packages:
   - package: brooklyn-data/dbt_artifacts
-    version: 2.6.0
+    version: 2.6.4
 ```

 :construction_worker: Make sure to fix at least the **minor** version, to avoid issues when a new release is open. See the notes on upgrading below for more detail.
diff --git a/dbt_project.yml b/dbt_project.yml
index 4a031fa9..684efce6 100644
--- a/dbt_project.yml
+++ b/dbt_project.yml
@@ -1,7 +1,7 @@
 name: "dbt_artifacts"
-version: "2.6.0"
+version: "2.6.4"
 config-version: 2
-require-dbt-version: [">=1.3.0", "<1.7.0"]
+require-dbt-version: [">=1.3.0", "<1.9.0"]
 profile: "dbt_artifacts"

 clean-targets: # folders to be removed by `dbt clean`
diff --git a/integration_test_project/example-env.sh b/integration_test_project/example-env.sh
index 51450cf3..47cb0d67 100755
--- a/integration_test_project/example-env.sh
+++ b/integration_test_project/example-env.sh
@@ -18,6 +18,7 @@ export DBT_ENV_SPARK_DRIVER_PATH= # /Library/simba/spark/lib/libsparkodbc_sbu.dy
 export DBT_ENV_SPARK_ENDPOINT= # The endpoint ID from the Databricks HTTP path

 # dbt environment variables, change these
+export DBT_VERSION="1_5_0"
 export DBT_CLOUD_PROJECT_ID=
 export DBT_CLOUD_JOB_ID=
 export DBT_CLOUD_RUN_ID=
diff --git a/macros/upload_individual_datasets/upload_models.sql b/macros/upload_individual_datasets/upload_models.sql
index d8dc49fd..7570b06a 100644
--- a/macros/upload_individual_datasets/upload_models.sql
+++ b/macros/upload_individual_datasets/upload_models.sql
@@ -24,26 +24,27 @@
             {{ adapter.dispatch('parse_json', 'dbt_artifacts')(adapter.dispatch('column_identifier', 'dbt_artifacts')(15)) }}
         from values
         {% for model in models -%}
-            {% do model.pop('raw_code', None) %}
+            {% set model_copy = model.copy() -%}
+            {% do model_copy.pop('raw_code', None) %}
             (
                 '{{ invocation_id }}', {# command_invocation_id #}
-                '{{ model.unique_id }}', {# node_id #}
+                '{{ model_copy.unique_id }}', {# node_id #}
                 '{{ run_started_at }}', {# run_started_at #}
-                '{{ model.database }}', {# database #}
-                '{{ model.schema }}', {# schema #}
-                '{{ model.name }}', {# name #}
-                '{{ tojson(model.depends_on.nodes) | replace('\\', '\\\\') }}', {# depends_on_nodes #}
-                '{{ model.package_name }}', {# package_name #}
-                '{{ model.original_file_path | replace('\\', '\\\\') }}', {# path #}
-                '{{ model.checksum.checksum | replace('\\', '\\\\') }}', {# checksum #}
-                '{{ model.config.materialized }}', {# materialization #}
-                '{{ tojson(model.tags) }}', {# tags #}
-                '{{ tojson(model.config.meta) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"') }}', {# meta #}
-                '{{ model.alias }}', {# alias #}
+                '{{ model_copy.database }}', {# database #}
+                '{{ model_copy.schema }}', {# schema #}
+                '{{ model_copy.name }}', {# name #}
+                '{{ tojson(model_copy.depends_on.nodes) | replace('\\', '\\\\') }}', {# depends_on_nodes #}
+                '{{ model_copy.package_name }}', {# package_name #}
+                '{{ model_copy.original_file_path | replace('\\', '\\\\') }}', {# path #}
+                '{{ model_copy.checksum.checksum | replace('\\', '\\\\') }}', {# checksum #}
+                '{{ model_copy.config.materialized }}', {# materialization #}
+                '{{ tojson(model_copy.tags) }}', {# tags #}
+                '{{ tojson(model_copy.config.meta) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"') }}', {# meta #}
+                '{{ model_copy.alias }}', {# alias #}
                 {% if var('dbt_artifacts_exclude_all_results', false) %}
                     null
                 {% else %}
-                    '{{ tojson(model) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"') }}' {# all_results #}
+                    '{{ tojson(model_copy) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"') }}' {# all_results #}
                 {% endif %}
             )
             {%- if not loop.last %},{%- endif %}
@@ -59,26 +60,27 @@
     {% if models != [] %}
         {% set model_values %}
         {% for model in models -%}
-            {% do model.pop('raw_code', None) %}
+            {% set model_copy = model.copy() -%}
+            {% do model_copy.pop('raw_code', None) %}
             (
                 '{{ invocation_id }}', {# command_invocation_id #}
-                '{{ model.unique_id }}', {# node_id #}
+                '{{ model_copy.unique_id }}', {# node_id #}
                 '{{ run_started_at }}', {# run_started_at #}
-                '{{ model.database }}', {# database #}
-                '{{ model.schema }}', {# schema #}
-                '{{ model.name }}', {# name #}
-                {{ tojson(model.depends_on.nodes) }}, {# depends_on_nodes #}
-                '{{ model.package_name }}', {# package_name #}
-                '{{ model.original_file_path | replace('\\', '\\\\') }}', {# path #}
-                '{{ model.checksum.checksum | replace('\\', '\\\\') }}', {# checksum #}
-                '{{ model.config.materialized }}', {# materialization #}
-                {{ tojson(model.tags) }}, {# tags #}
-                {{ adapter.dispatch('parse_json', 'dbt_artifacts')(tojson(model.config.meta)) }}, {# meta #}
-                '{{ model.alias }}', {# alias #}
+                '{{ model_copy.database }}', {# database #}
+                '{{ model_copy.schema }}', {# schema #}
+                '{{ model_copy.name }}', {# name #}
+                {{ tojson(model_copy.depends_on.nodes) }}, {# depends_on_nodes #}
+                '{{ model_copy.package_name }}', {# package_name #}
+                '{{ model_copy.original_file_path | replace('\\', '\\\\') }}', {# path #}
+                '{{ model_copy.checksum.checksum | replace('\\', '\\\\') }}', {# checksum #}
+                '{{ model_copy.config.materialized }}', {# materialization #}
+                {{ tojson(model_copy.tags) }}, {# tags #}
+                {{ adapter.dispatch('parse_json', 'dbt_artifacts')(tojson(model_copy.config.meta)) }}, {# meta #}
+                '{{ model_copy.alias }}', {# alias #}
                 {% if var('dbt_artifacts_exclude_all_results', false) %}
                     null
                 {% else %}
-                    {{ adapter.dispatch('parse_json', 'dbt_artifacts')(tojson(model) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"')) }} {# all_results #}
+                    {{ adapter.dispatch('parse_json', 'dbt_artifacts')(tojson(model_copy) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"')) }} {# all_results #}
                 {% endif %}
             )
             {%- if not loop.last %},{%- endif %}
@@ -94,26 +96,27 @@
     {% if models != [] %}
         {% set model_values %}
         {% for model in models -%}
-            {% do model.pop('raw_code', None) %}
+            {% set model_copy = model.copy() -%}
+            {% do model_copy.pop('raw_code', None) %}
             (
                 '{{ invocation_id }}', {# command_invocation_id #}
-                '{{ model.unique_id }}', {# node_id #}
+                '{{ model_copy.unique_id }}', {# node_id #}
                 '{{ run_started_at }}', {# run_started_at #}
-                '{{ model.database }}', {# database #}
-                '{{ model.schema }}', {# schema #}
-                '{{ model.name }}', {# name #}
-                '{{ tojson(model.depends_on.nodes) }}', {# depends_on_nodes #}
-                '{{ model.package_name }}', {# package_name #}
-                $${{ model.original_file_path | replace('\\', '\\\\') }}$$, {# path #}
-                '{{ model.checksum.checksum }}', {# checksum #}
-                '{{ model.config.materialized }}', {# materialization #}
-                '{{ tojson(model.tags) }}', {# tags #}
-                $${{ model.config.meta }}$$, {# meta #}
-                '{{ model.alias }}', {# alias #}
+                '{{ model_copy.database }}', {# database #}
+                '{{ model_copy.schema }}', {# schema #}
+                '{{ model_copy.name }}', {# name #}
+                '{{ tojson(model_copy.depends_on.nodes) }}', {# depends_on_nodes #}
+                '{{ model_copy.package_name }}', {# package_name #}
+                $${{ model_copy.original_file_path | replace('\\', '\\\\') }}$$, {# path #}
+                '{{ model_copy.checksum.checksum }}', {# checksum #}
+                '{{ model_copy.config.materialized }}', {# materialization #}
+                '{{ tojson(model_copy.tags) }}', {# tags #}
+                $${{ model_copy.config.meta }}$$, {# meta #}
+                '{{ model_copy.alias }}', {# alias #}
                 {% if var('dbt_artifacts_exclude_all_results', false) %}
                     null
                 {% else %}
-                    $${{ tojson(model) }}$$ {# all_results #}
+                    $${{ tojson(model_copy) }}$$ {# all_results #}
                 {% endif %}
             )
             {%- if not loop.last %},{%- endif %}
diff --git a/macros/upload_individual_datasets/upload_seed_executions.sql b/macros/upload_individual_datasets/upload_seed_executions.sql
index ca947ac2..1ccbfe2a 100644
--- a/macros/upload_individual_datasets/upload_seed_executions.sql
+++ b/macros/upload_individual_datasets/upload_seed_executions.sql
@@ -38,26 +38,10 @@
                 '{{ model.thread_id }}', {# thread_id #}
                 '{{ model.status }}', {# status #}

-                {% if model.timing != [] %}
-                    {% for stage in model.timing if stage.name == "compile" %}
-                        {% if loop.length == 0 %}
-                            null, {# compile_started_at #}
-                        {% else %}
-                            '{{ stage.started_at }}', {# compile_started_at #}
-                        {% endif %}
-                    {% endfor %}
-
-                    {% for stage in model.timing if stage.name == "execute" %}
-                        {% if loop.length == 0 %}
-                            null, {# query_completed_at #}
-                        {% else %}
-                            '{{ stage.completed_at }}', {# query_completed_at #}
-                        {% endif %}
-                    {% endfor %}
-                {% else %}
-                    null, {# compile_started_at #}
-                    null, {# query_completed_at #}
-                {% endif %}
+                {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
+                {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
+                {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
+                {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

                 {{ model.execution_time }}, {# total_node_runtime #}
                 null, -- rows_affected not available {# Only available in Snowflake #}
@@ -95,26 +79,10 @@
                 '{{ model.thread_id }}', {# thread_id #}
                 '{{ model.status }}', {# status #}

-                {% if model.timing != [] %}
-                    {% for stage in model.timing if stage.name == "compile" %}
-                        {% if loop.length == 0 %}
-                            null, {# compile_started_at #}
-                        {% else %}
-                            '{{ stage.started_at }}', {# compile_started_at #}
-                        {% endif %}
-                    {% endfor %}
-
-                    {% for stage in model.timing if stage.name == "execute" %}
-                        {% if loop.length == 0 %}
-                            null, {# query_completed_at #}
-                        {% else %}
-                            '{{ stage.completed_at }}', {# query_completed_at #}
-                        {% endif %}
-                    {% endfor %}
-                {% else %}
-                    null, {# compile_started_at #}
-                    null, {# query_completed_at #}
-                {% endif %}
+                {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
+                {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
+                {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
+                {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

                 {{ model.execution_time }}, {# total_node_runtime #}
                 null, -- rows_affected not available {# Databricks #}
@@ -170,26 +138,10 @@
                 '{{ model.thread_id }}', {# thread_id #}
                 '{{ model.status }}', {# status #}

-                {% if model.timing != [] %}
-                    {% for stage in model.timing if stage.name == "compile" %}
-                        {% if loop.length == 0 %}
-                            null, {# compile_started_at #}
-                        {% else %}
-                            '{{ stage.started_at }}', {# compile_started_at #}
-                        {% endif %}
-                    {% endfor %}
-
-                    {% for stage in model.timing if stage.name == "execute" %}
-                        {% if loop.length == 0 %}
-                            null, {# query_completed_at #}
-                        {% else %}
-                            '{{ stage.completed_at }}', {# query_completed_at #}
-                        {% endif %}
-                    {% endfor %}
-                {% else %}
-                    null, {# compile_started_at #}
-                    null, {# query_completed_at #}
-                {% endif %}
+                {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
+                {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
+                {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
+                {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

                 {{ model.execution_time }}, {# total_node_runtime #}
                 try_cast('{{ model.adapter_response.rows_affected }}' as int), {# rows_affected #}
diff --git a/macros/upload_individual_datasets/upload_snapshot_executions.sql b/macros/upload_individual_datasets/upload_snapshot_executions.sql
index 369348cd..2006b168 100644
--- a/macros/upload_individual_datasets/upload_snapshot_executions.sql
+++ b/macros/upload_individual_datasets/upload_snapshot_executions.sql
@@ -38,26 +38,10 @@
                 '{{ model.thread_id }}', {# thread_id #}
                 '{{ model.status }}', {# status #}

-                {% if model.timing != [] %}
-                    {% for stage in model.timing if stage.name == "compile" %}
-                        {% if loop.length == 0 %}
-                            null, {# compile_started_at #}
-                        {% else %}
-                            '{{ stage.started_at }}', {# compile_started_at #}
-                        {% endif %}
-                    {% endfor %}
-
-                    {% for stage in model.timing if stage.name == "execute" %}
-                        {% if loop.length == 0 %}
-                            null, {# query_completed_at #}
-                        {% else %}
-                            '{{ stage.completed_at }}', {# query_completed_at #}
-                        {% endif %}
-                    {% endfor %}
-                {% else %}
-                    null, {# compile_started_at #}
-                    null, {# query_completed_at #}
-                {% endif %}
+                {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
+                {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
+                {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
+                {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

                 {{ model.execution_time }}, {# total_node_runtime #}
                 null, -- rows_affected not available {# Only available in Snowflake #}
@@ -95,26 +79,10 @@
                 '{{ model.thread_id }}', {# thread_id #}
                 '{{ model.status }}', {# status #}

-                {% if model.timing != [] %}
-                    {% for stage in model.timing if stage.name == "compile" %}
-                        {% if loop.length == 0 %}
-                            null, {# compile_started_at #}
-                        {% else %}
-                            '{{ stage.started_at }}', {# compile_started_at #}
-                        {% endif %}
-                    {% endfor %}
-
-                    {% for stage in model.timing if stage.name == "execute" %}
-                        {% if loop.length == 0 %}
-                            null, {# query_completed_at #}
-                        {% else %}
-                            '{{ stage.completed_at }}', {# query_completed_at #}
-                        {% endif %}
-                    {% endfor %}
-                {% else %}
-                    null, {# compile_started_at #}
-                    null, {# query_completed_at #}
-                {% endif %}
+                {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
+                {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
+                {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
+                {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

                 {{ model.execution_time }}, {# total_node_runtime #}
                 null, -- rows_affected not available {# Databricks #}
@@ -170,26 +138,10 @@
                 '{{ model.thread_id }}', {# thread_id #}
                 '{{ model.status }}', {# status #}

-                {% if model.timing != [] %}
-                    {% for stage in model.timing if stage.name == "compile" %}
-                        {% if loop.length == 0 %}
-                            null, {# compile_started_at #}
-                        {% else %}
-                            '{{ stage.started_at }}', {# compile_started_at #}
-                        {% endif %}
-                    {% endfor %}
-
-                    {% for stage in model.timing if stage.name == "execute" %}
-                        {% if loop.length == 0 %}
-                            null, {# query_completed_at #}
-                        {% else %}
-                            '{{ stage.completed_at }}', {# query_completed_at #}
-                        {% endif %}
-                    {% endfor %}
-                {% else %}
-                    null, {# compile_started_at #}
-                    null, {# query_completed_at #}
-                {% endif %}
+                {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
+                {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
+                {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
+                {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

                 {{ model.execution_time }}, {# total_node_runtime #}
                 try_cast('{{ model.adapter_response.rows_affected }}' as int), {# rows_affected #}
diff --git a/macros/upload_individual_datasets/upload_test_executions.sql b/macros/upload_individual_datasets/upload_test_executions.sql
index a42b03ba..cb13288b 100644
--- a/macros/upload_individual_datasets/upload_test_executions.sql
+++ b/macros/upload_individual_datasets/upload_test_executions.sql
@@ -35,26 +35,10 @@
                 '{{ test.thread_id }}', {# thread_id #}
                 '{{ test.status }}', {# status #}

-                {% if test.timing != [] %}
-                    {% for stage in test.timing if stage.name == "compile" %}
-                        {% if loop.length == 0 %}
-                            null, {# compile_started_at #}
-                        {% else %}
-                            '{{ stage.started_at }}', {# compile_started_at #}
-                        {% endif %}
-                    {% endfor %}
-
-                    {% for stage in test.timing if stage.name == "execute" %}
-                        {% if loop.length == 0 %}
-                            null, {# query_completed_at #}
-                        {% else %}
-                            '{{ stage.completed_at }}', {# query_completed_at #}
-                        {% endif %}
-                    {% endfor %}
-                {% else %}
-                    null, {# compile_started_at #}
-                    null, {# query_completed_at #}
-                {% endif %}
+                {% set compile_started_at = (test.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
+                {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
+                {% set query_completed_at = (test.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
+                {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

                 {{ test.execution_time }}, {# total_node_runtime #}
                 null, {# rows_affected not available in Databricks #}
@@ -89,26 +73,10 @@
                 '{{ test.thread_id }}', {# thread_id #}
                 '{{ test.status }}', {# status #}

-                {% if test.timing != [] %}
-                    {% for stage in test.timing if stage.name == "compile" %}
-                        {% if loop.length == 0 %}
-                            null, {# compile_started_at #}
-                        {% else %}
-                            '{{ stage.started_at }}', {# compile_started_at #}
-                        {% endif %}
-                    {% endfor %}
-
-                    {% for stage in test.timing if stage.name == "execute" %}
-                        {% if loop.length == 0 %}
-                            null, {# query_completed_at #}
-                        {% else %}
-                            '{{ stage.completed_at }}', {# query_completed_at #}
-                        {% endif %}
-                    {% endfor %}
-                {% else %}
-                    null, {# compile_started_at #}
-                    null, {# query_completed_at #}
-                {% endif %}
+                {% set compile_started_at = (test.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
+                {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
+                {% set query_completed_at = (test.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
+                {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

                 {{ test.execution_time }}, {# total_node_runtime #}
                 null, {# rows_affected not available in Databricks #}
@@ -180,3 +148,56 @@
         {{ return("") }}
     {% endif %}
 {% endmacro -%}
+
+{% macro snowflake__get_test_executions_dml_sql(tests) -%}
+    {% if tests != [] %}
+        {% set test_execution_values %}
+        select
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(1) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(2) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(3) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(4) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(5) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(6) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(7) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(8) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(9) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(10) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(11) }},
+            {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(12) }},
+            {{ adapter.dispatch('parse_json', 'dbt_artifacts')(adapter.dispatch('column_identifier', 'dbt_artifacts')(13)) }}
+        from values
+        {% for test in tests -%}
+            (
+                '{{ invocation_id }}', {# command_invocation_id #}
+                '{{ test.node.unique_id }}', {# node_id #}
+                '{{ run_started_at }}', {# run_started_at #}
+
+                {% set config_full_refresh = test.node.config.full_refresh %}
+                {% if config_full_refresh is none %}
+                    {% set config_full_refresh = flags.FULL_REFRESH %}
+                {% endif %}
+                '{{ config_full_refresh }}', {# was_full_refresh #}
+
+                '{{ test.thread_id }}', {# thread_id #}
+                '{{ test.status }}', {# status #}
+
+                {% set compile_started_at = (test.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
+                {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
+                {% set query_completed_at = (test.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
+                {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}
+
+                {{ test.execution_time }}, {# total_node_runtime #}
+                try_cast('{{ test.adapter_response.rows_affected }}' as int), {# rows_affected #}
+                {{ 'null' if test.failures is none else test.failures }}, {# failures #}
+                '{{ test.message | replace("\\", "\\\\") | replace("'", "\\'") | replace('"', '\\"') }}', {# message #}
+                '{{ tojson(test.adapter_response) | replace("\\", "\\\\") | replace("'", "\\'") | replace('"', '\\"') }}' {# adapter_response #}
+            )
+            {%- if not loop.last %},{%- endif %}
+        {%- endfor %}
+        {% endset %}
+        {{ test_execution_values }}
+    {% else %}
+        {{ return("") }}
+    {% endif %}
+{% endmacro -%}
\ No newline at end of file
diff --git a/macros/upload_results/get_column_name_lists.sql b/macros/upload_results/get_column_name_lists.sql
index ea708467..7911e866 100644
--- a/macros/upload_results/get_column_name_lists.sql
+++ b/macros/upload_results/get_column_name_lists.sql
@@ -52,8 +52,8 @@

     {% elif dataset == 'model_executions' %}
         (
-            node_id,
             command_invocation_id,
+            node_id,
             run_started_at,
             was_full_refresh,
             thread_id,
@@ -76,8 +76,8 @@

     {% elif dataset == 'models' %}
        (
-            node_id,
             command_invocation_id,
+            node_id,
             run_started_at,
             database,
             schema,
diff --git a/macros/upload_results/upload_results.sql b/macros/upload_results/upload_results.sql
index fcadc199..114a667d 100644
--- a/macros/upload_results/upload_results.sql
+++ b/macros/upload_results/upload_results.sql
@@ -18,8 +18,8 @@
         {# Get the results that need to be uploaded #}
         {% set objects = dbt_artifacts.get_dataset_content(dataset) %}

-        {# Upload in chunks to reduce query size #}
-        {% if dataset == 'model' %}
+        {# Upload in chunks to reduce the query size #}
+        {% if dataset == 'models' %}
            {% set upload_limit = 50 if target.type == 'bigquery' else 100 %}
        {% else %}
            {% set upload_limit = 300 if target.type == 'bigquery' else 5000 %}
diff --git a/tox.ini b/tox.ini
index 0b4e2403..aee9fc2b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -35,7 +35,7 @@ rules = LT01,LT03,CP01,AL01,AL02,CP02,ST08,LT06,LT07,AM01,LT08,AL05,RF02,RF03,CP

 deps =
     sqlfluff-templater-dbt~=2.0.2
-    dbt-snowflake~=1.6.0
+    dbt-snowflake~=1.8.0

 [sqlfluff:indentation]
 indent_unit = space
@@ -113,13 +113,13 @@ commands =
     sqlfluff fix models --ignore parsing

 # Generate docs
 [testenv:generate_docs]
-deps = dbt-snowflake~=1.6.0
+deps = dbt-snowflake~=1.8.0
 commands = dbt docs generate --profiles-dir integration_test_project

 # Snowflake integration tests
 [testenv:integration_snowflake]
 changedir = integration_test_project
-deps = dbt-snowflake~=1.6.0
+deps = dbt-snowflake~=1.8.0
 commands =
     dbt clean
     dbt deps
@@ -158,10 +158,26 @@ commands =
     dbt deps
     dbt build --target snowflake

+[testenv:integration_snowflake_1_7_0]
+changedir = integration_test_project
+deps = dbt-snowflake~=1.7.0
+commands =
+    dbt clean
+    dbt deps
+    dbt build --target snowflake
+
+[testenv:integration_snowflake_1_8_0]
+changedir = integration_test_project
+deps = dbt-snowflake~=1.8.0
+commands =
+    dbt clean
+    dbt deps
+    dbt build --target snowflake
+
 # Databricks integration tests
 [testenv:integration_databricks]
 changedir = integration_test_project
-deps = dbt-databricks~=1.6.0
+deps = dbt-databricks~=1.8.0
 commands =
     dbt clean
     dbt deps
@@ -199,10 +215,26 @@ commands =
     dbt deps
     dbt build --target databricks

+[testenv:integration_databricks_1_7_0]
+changedir = integration_test_project
+deps = dbt-databricks~=1.7.0
+commands =
+    dbt clean
+    dbt deps
+    dbt build --target databricks
+
+[testenv:integration_databricks_1_8_0]
+changedir = integration_test_project
+deps = dbt-databricks~=1.8.0
+commands =
+    dbt clean
+    dbt deps
+    dbt build --target databricks
+
 # Bigquery integration tests
 [testenv:integration_bigquery]
 changedir = integration_test_project
-deps = dbt-bigquery~=1.6.0
+deps = dbt-bigquery~=1.8.0
 commands =
     dbt clean
     dbt deps
@@ -240,6 +272,22 @@ commands =
     dbt deps
     dbt build --target bigquery --vars '"my_var": "my value"'

+[testenv:integration_bigquery_1_7_0]
+changedir = integration_test_project
+deps = dbt-bigquery~=1.7.0
+commands =
+    dbt clean
+    dbt deps
+    dbt build --target bigquery --vars '"my_var": "my value"'
+
+[testenv:integration_bigquery_1_8_0]
+changedir = integration_test_project
+deps = dbt-bigquery~=1.8.0
+commands =
+    dbt clean
+    dbt deps
+    dbt build --target bigquery --vars '"my_var": "my value"'
+
 # Spark integration test (disabled)
 [testenv:integration_spark]
 changedir = integration_test_project
@@ -251,7 +299,7 @@ commands =

 [testenv:integration_postgres]
 changedir = integration_test_project
-deps = dbt-postgres~=1.6.0
+deps = dbt-postgres~=1.8.0
 commands =
     dbt clean
     dbt deps
@@ -289,3 +337,20 @@ commands =
     dbt deps
     dbt build --target postgres

+[testenv:integration_postgres_1_7_0]
+changedir = integration_test_project
+deps = dbt-postgres~=1.7.0
+commands =
+    dbt clean
+    dbt deps
+    dbt build --target postgres
+
+[testenv:integration_postgres_1_8_0]
+changedir = integration_test_project
+deps = dbt-postgres~=1.8.0
+commands =
+    dbt clean
+    dbt deps
+    dbt build --target postgres
+
+
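Two Jinja patterns recur throughout the macro changes above: the `selectattr`-based timing lookup that replaces the nested compile/execute loops, and popping `raw_code` from a copy of the node rather than from the node itself. The sketch below is only an illustrative, standalone check of those patterns, rendered with plain `jinja2` (which dbt builds on); the template strings and the sample `timing`/node dictionaries are assumptions made for the demo, not dbt's real artifact objects.

```python
# Minimal sketch, assuming only the jinja2 package is installed.
from jinja2 import Environment

env = Environment()

# 1. The selectattr-based lookup: take the first timing entry named "compile",
#    fall back to an empty dict, and render null when the key is absent.
timing_tpl = env.from_string(
    "{% set compile_started_at = (timing | selectattr('name', 'eq', 'compile')"
    " | first | default({}))['started_at'] %}"
    "{% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}"
)
print(timing_tpl.render(timing=[{"name": "compile", "started_at": "2024-05-01 12:00:00"}]))
# -> '2024-05-01 12:00:00'
print(timing_tpl.render(timing=[]))  # node with no timing entries -> null

# 2. Why upload_models.sql now pops 'raw_code' from a copy: popping from the
#    original mapping would mutate the node for every later consumer.
node = {"unique_id": "model.demo.my_model", "raw_code": "select 1"}  # hypothetical node
node_copy = node.copy()
node_copy.pop("raw_code", None)
print(node)       # original still carries raw_code
print(node_copy)  # the copy used for the upload does not
```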