Skip to content

Commit

Permalink
Merge pull request #10 from fishtown-analytics/feature/get-relations-by
Browse files Browse the repository at this point in the history
Some dbt_utils.get_relations_by_* macros
  • Loading branch information
jtcohen6 authored Apr 15, 2021
2 parents 0fdc775 + 438ab30 commit c380649
Show file tree
Hide file tree
Showing 5 changed files with 45 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ jobs:
python3 -m venv venv
. venv/bin/activate
pip install --upgrade pip setuptools
pip install dbt-spark
pip install dbt-spark[PyHive]
mkdir -p ~/.dbt
cp integration_tests/ci/sample.profiles.yml ~/.dbt/profiles.yml
Expand Down
1 change: 0 additions & 1 deletion .gitmodules
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,3 @@
[submodule "snowplow"]
path = snowplow
url = https://github.com/fishtown-analytics/snowplow
branch = refactor/int-test-org
2 changes: 1 addition & 1 deletion dbt-utils
7 changes: 2 additions & 5 deletions integration_tests/dbt_utils/dbt_project.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,20 +17,17 @@ clean-targets: # directories to be removed by `dbt clean`
- "dbt_modules"

vars:
dbt_utils_dispatch_list: ['spark_utils']
dbt_utils_dispatch_list: ['spark_utils', 'dbt_utils_integration_tests']

models:
dbt_utils_integration_tests:
sql:
# macro doesn't work
# macro doesn't work for this integration test (schema pattern)
test_get_relations_by_pattern:
+enabled: false
test_get_relations_by_prefix_and_union:
+enabled: false
# integration test doesn't work
test_groupby:
+enabled: false

schema_tests:
# integration test doesn't work
test_recency:
Expand Down
41 changes: 41 additions & 0 deletions macros/dbt_utils/sql/get_relations_by_prefix.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
{#-
    Spark override of dbt_utils.get_relations_by_pattern.
    Runs `show table extended in <schema_pattern> like '<table_pattern>'` and
    returns a list of Relation objects for every matching table/view.
    NOTE(review): the `exclude` and `database` arguments are accepted for
    signature parity with dbt_utils but are not applied here — no exclusion
    filtering happens, and `database` is never referenced. Confirm whether
    exclude support is intended.
-#}
{% macro spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}

{%- call statement('get_tables', fetch_result=True) %}

show table extended in {{ schema_pattern }} like '{{ table_pattern }}'

{%- endcall -%}

{#- Agate table of results; rows are (database/schema, tableName, isTemporary, information) per Spark's SHOW TABLE EXTENDED -#}
{%- set table_list = load_result('get_tables') -%}

{%- if table_list and table_list['table'] -%}
{%- set tbl_relations = [] -%}
{%- for row in table_list['table'] -%}
{#- database=None: Spark relations are schema-qualified only (no catalog) -#}
{%- set tbl_relation = api.Relation.create(
database=None,
schema=row[0],
identifier=row[1],
{#- row[3] is the extended "information" blob; 'Type: VIEW' distinguishes views from tables -#}
type=('view' if 'Type: VIEW' in row[3] else 'table')
) -%}
{%- do tbl_relations.append(tbl_relation) -%}
{%- endfor -%}

{{ return(tbl_relations) }}
{%- else -%}
{#- No matches (or statement returned nothing): return an empty list, not None -#}
{{ return([]) }}
{%- endif -%}

{% endmacro %}

{#-
    Spark override of dbt_utils.get_relations_by_prefix.
    Treats `table_pattern` as a prefix by appending Spark's `*` wildcard,
    then delegates to spark__get_relations_by_pattern.
    Fix: forward the caller's `exclude` and `database` arguments instead of
    hard-coding `exclude=''` and `database=target.database`, which silently
    discarded whatever the caller passed.
-#}
{% macro spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}
{%- set table_pattern = table_pattern ~ '*' -%}
{{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude=exclude, database=database)) }}
{% endmacro %}

{#-
    Spark override of dbt_utils.get_tables_by_pattern.
    Thin alias for spark__get_relations_by_pattern (Spark does not distinguish
    the two lookups).
    Fix: forward the caller's `exclude` and `database` arguments instead of
    hard-coding `exclude=''` and `database=target.database`.
-#}
{% macro spark__get_tables_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}
{{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude=exclude, database=database)) }}
{% endmacro %}

{#-
    Spark override of dbt_utils.get_tables_by_prefix.
    Thin alias for spark__get_relations_by_prefix (Spark does not distinguish
    the two lookups).
    Fix: forward the caller's `exclude` and `database` arguments instead of
    hard-coding `exclude=''` and `database=target.database`.
-#}
{% macro spark__get_tables_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}
{{ return(spark_utils.spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude=exclude, database=database)) }}
{% endmacro %}

0 comments on commit c380649

Please sign in to comment.