diff --git a/Makefile b/Makefile
index 2a695b8..e68f6e5 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,24 @@
 SHELL := /bin/bash
+
+# set default dbt target when the caller does not override DBT_TARGET
+DBT_TARGET ?= sbx
+
 dbt-console:
 	docker-compose run dbt_console
-.PHONY: dbt-console
\ No newline at end of file
+
+# create the Snowflake API integration for the current DBT_TARGET
+sl-flow-api:
+	dbt run-operation create_aws_flow_api \
+	--profile flow \
+	--target $(DBT_TARGET) \
+	--profiles-dir ~/.dbt/
+
+# (re)create the streamline external UDFs for the current DBT_TARGET
+udfs:
+	dbt run-operation create_udf_get_chainhead \
+	--profile flow \
+	--target $(DBT_TARGET) \
+	--profiles-dir ~/.dbt/
+
+# declare every phony target so make never mistakes them for files on disk
+.PHONY: dbt-console sl-flow-api udfs
diff --git a/macros/streamline/api_integrations.sql b/macros/streamline/api_integrations.sql
new file mode 100644
index 0000000..37d8a16
--- /dev/null
+++ b/macros/streamline/api_integrations.sql
@@ -0,0 +1,68 @@
+-- macros used to create the flow Snowflake API integrations, one per dbt target
+{% macro create_aws_flow_api() %}
+    {{ log("Creating integration for target:" ~ target.name, info=True) }}
+    {% if target.name == "prod" %}
+        {% set sql %}
+        CREATE api integration IF NOT EXISTS aws_flow_api_prod
+            api_provider = aws_api_gateway
+            api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-flow'
+            {# NOTE(review): 'https:///prod/' has an empty hostname -- confirm and fill in the prod API Gateway URL #}
+            api_allowed_prefixes = ('https:///prod/')
+            enabled = TRUE;
+        {% endset %}
+        {% do run_and_log_sql(sql) %}
+    {% elif target.name == "dev" %}
+        {% set sql %}
+        CREATE api integration IF NOT EXISTS aws_flow_api_dev
+            api_provider = aws_api_gateway
+            api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-flow'
+            {# NOTE(review): 'https:///dev/' has an empty hostname -- confirm and fill in the dev API Gateway URL #}
+            api_allowed_prefixes = ('https:///dev/')
+            enabled = TRUE;
+        {% endset %}
+        {% do run_and_log_sql(sql) %}
+    {% elif target.name == "sbx" %}
+        {{ log("Generating api integration for target:" ~ target.name, info=True) }}
+        {% set sql %}
+        CREATE api integration IF NOT EXISTS aws_flow_api_sbx
+            api_provider = aws_api_gateway
+            api_aws_role_arn = 'arn:aws:iam::579011195466:role/flow-api-sbx-rolesnowflakeudfsAF733095-1R9BM6QXEKD5O'
+            api_allowed_prefixes = ('https://bc5ejedoq8.execute-api.us-east-1.amazonaws.com/sbx')
+            enabled = TRUE;
+        {% endset %}
+        {# FIX: the previous "-- {% do run_query(sql) %}" line was not actually
+           disabled -- dbt renders jinja inside SQL comments, so the statement
+           ran twice; it is removed and run_and_log_sql is called exactly once #}
+        {% do run_and_log_sql(sql) %}
+    {% endif %}
+{% endmacro %}
+
+-- macro used to run a sql query, log it, and return the agate result table
+-- TODO: Move this to fsc-utils package
+{% macro run_and_log_sql(sql_query, log_level='info') %}
+    {% do log('Executing SQL query: ' ~ sql_query, info=True) %}
+
+    {% set query_result = run_query(sql_query) %}
+    {# DDL statements return a single status row; guard against empty results #}
+    {% set result_str = query_result.columns[0].values()[0] if query_result.columns else None %}
+
+    {% do log('SQL query result: ' ~ result_str, info=True) %}
+
+    {# FIX: the macro previously emitted the literal text "result_<prefix>"
+       (the *name* of a local variable) instead of any result; return the
+       agate table so callers can inspect the outcome #}
+    {% do return(query_result) %}
+{% endmacro %}
+
+-- macro used to grant select privileges on all views/tables in the target schema to a role
+{% macro grant_select(role) %}
+    {{ log("Granting privileges to role: " ~ role, info=True) }}
+    {% set sql %}
+        grant usage on schema {{ target.schema }} to role {{ role }};
+        grant select on all tables in schema {{ target.schema }} to role {{ role }};
+        grant select on all views in schema {{ target.schema }} to role {{ role }};
+    {% endset %}
+
+    {% do run_query(sql) %}
+    {% do log("Privileges granted", info=True) %}
+{% endmacro %}
\ No newline at end of file
diff --git a/macros/streamline/get_base_table_udft.sql b/macros/streamline/get_base_table_udft.sql
new file mode 100644
index 0000000..9dbe2ff
--- /dev/null
+++ b/macros/streamline/get_base_table_udft.sql
@@ -0,0 +1,19 @@
+{% macro create_udtf_get_base_table(schema) %}
+-- table function returning every height from 1 up to max_height
+create or replace function {{ schema }}.udtf_get_base_table(max_height integer)
+returns table (height number)
+as
+$$
+    with base as (
+        -- seq4() may contain gaps, so row_number() is used to guarantee
+        -- a dense, consecutive sequence of ids
+        select
+            row_number() over (order by seq4()) as id
+        from
+            table(generator(rowcount => 1000000000)) -- hard cap of 1B heights
+    )
+    select id as height
+    from base
+    where id <= max_height
+$$;
+{% endmacro %}
\ No newline at end of file
diff --git a/macros/streamline/streamline_udfs.sql b/macros/streamline/streamline_udfs.sql
new file mode
100644
index 0000000..c8563dd
--- /dev/null
+++ b/macros/streamline/streamline_udfs.sql
@@ -0,0 +1,17 @@
+{% macro create_udf_get_chainhead() %}
+    {{ log("Creating udf get_chainhead for target:" ~ target.name ~ ", schema: " ~ target.schema, info=True) }}
+    {{ log("role:" ~ target.role ~ ", user:" ~ target.user, info=True) }}
+    CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead() returns variant api_integration =
+    {% if target.name == "prod" %}
+        {# FIX: create_aws_flow_api() creates the prod integration as
+           aws_flow_api_prod, not aws_flow_api -- the old name never exists #}
+        {# NOTE(review): sbx passes a full invocation URL after AS while
+           prod/dev pass relative paths; confirm the full prod/dev URLs #}
+        aws_flow_api_prod AS '/prod/get_chainhead'
+    {% elif target.name == "dev" %}
+        aws_flow_api_dev AS '/dev/get_chainhead'
+    {% elif target.name == "sbx" %}
+        {{ log("Creating sbx get_chainhead", info=True) }}
+        aws_flow_api_sbx AS 'https://bc5ejedoq8.execute-api.us-east-1.amazonaws.com/sbx/get_chainhead'
+    {%- endif %};
+{% endmacro %}
diff --git a/models/streamline/README.md b/models/streamline/README.md
new file mode 100644
index 0000000..ee4bb00
--- /dev/null
+++ b/models/streamline/README.md
@@ -0,0 +1,17 @@
+# Setup Snowflake Api Integration & UDFS
+
+## Setup Snowflake Api Integration
+
+Use the [create_aws_flow_api()](../../macros/streamline/api_integrations.sql#2) macro to create the `streamline-flow` Snowflake API integration.
+
+The dbt target is selected with the `DBT_TARGET` make variable (defaults to `sbx`):
+
+```zsh
+DBT_TARGET=sbx make sl-flow-api
+
+# This runs:
+# dbt run-operation create_aws_flow_api \
+#   --profile flow \
+#   --target $(DBT_TARGET) \
+#   --profiles-dir ~/.dbt/
+```
\ No newline at end of file