From cf32544cb156b80317bdd8eb5e12bddac1b0b1c9 Mon Sep 17 00:00:00 2001 From: austin Date: Thu, 16 Mar 2023 13:46:38 -0400 Subject: [PATCH 01/23] stash --- dbt_project.yml | 4 +- macros/create_sps.sql | 2 +- macros/run_sp_create_prod_clone.sql | 8 +-- macros/streamline/api_integrations.sql | 11 ---- macros/streamline/configs.yaml.sql | 75 -------------------------- macros/streamline/streamline_udfs.sql | 22 -------- models/sources.yml | 2 +- profiles.yml | 2 +- 8 files changed, 9 insertions(+), 117 deletions(-) delete mode 100644 macros/streamline/api_integrations.sql delete mode 100644 macros/streamline/streamline_udfs.sql diff --git a/dbt_project.yml b/dbt_project.yml index 4257307..fb47932 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -1,12 +1,12 @@ # Name your project! Project names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: "livequery_models" +name: "reference_models" version: "1.0.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. -profile: "livequery" +profile: "reference" # These configurations specify where dbt should look for different types of files. # The `source-paths` config, for example, states that models in this project can be diff --git a/macros/create_sps.sql b/macros/create_sps.sql index 2404546..ea91fdd 100644 --- a/macros/create_sps.sql +++ b/macros/create_sps.sql @@ -1,6 +1,6 @@ {% macro create_sps() %} {% if var("UPDATE_UDFS_AND_SPS") %} - {% if target.database == 'LIVEQUERY' %} + {% if target.database == 'REFERENCE' %} CREATE schema IF NOT EXISTS _internal; {{ sp_create_prod_clone('_internal') }}; {% endif %} diff --git a/macros/run_sp_create_prod_clone.sql b/macros/run_sp_create_prod_clone.sql index f2c23fb..19d9e7e 100644 --- a/macros/run_sp_create_prod_clone.sql +++ b/macros/run_sp_create_prod_clone.sql @@ -1,9 +1,9 @@ {% macro run_sp_create_prod_clone() %} {% set clone_query %} - call livequery._internal.create_prod_clone( - 'livequery', - 'livequery_dev', - 'livequery_dev_owner' + call reference._internal.create_prod_clone( + 'reference', + 'reference_dev', + 'reference_dev_owner' ); {% endset %} {% do run_query(clone_query) %} diff --git a/macros/streamline/api_integrations.sql b/macros/streamline/api_integrations.sql deleted file mode 100644 index f6d2d38..0000000 --- a/macros/streamline/api_integrations.sql +++ /dev/null @@ -1,11 +0,0 @@ -{% macro create_aws_ethereum_api() %} - {% if target.name == "prod" %} - {% set sql %} - CREATE api integration IF NOT EXISTS aws_ethereum_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::661245089684:role/snowflake-api-ethereum' api_allowed_prefixes = ( - 'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/', - 'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/' - ) enabled = TRUE; -{% endset %} - {% do run_query(sql) %} - {% endif %} -{% endmacro %} diff --git a/macros/streamline/configs.yaml.sql b/macros/streamline/configs.yaml.sql index 59d0949..471371e 100644 --- a/macros/streamline/configs.yaml.sql +++ b/macros/streamline/configs.yaml.sql @@ -3,44 +3,6 @@ {# UTILITY SCHEMA #} -- name: _utils.udf_introspect - signature: - - [echo, STRING] - func_type: SECURE EXTERNAL - return_type: TEXT - api_integration: '{{ var("API_INTEGRATION") }}' - sql: introspect - - -- name: _utils.udf_whoami - signature: [] - func_type: SECURE - return_type: TEXT - options: NOT NULL STRICT IMMUTABLE MEMOIZABLE - sql: | - SELECT - 
COALESCE(SPLIT_PART(GETVARIABLE('QUERY_TAG_SESSION'), ',',2), CURRENT_USER()) - -- name: _utils.udf_register_secret - signature: - - [request_id, STRING] - - [user_id, STRING] - - [key, STRING] - return_type: TEXT - func_type: SECURE EXTERNAL - api_integration: '{{ var("API_INTEGRATION") }}' - options: NOT NULL STRICT - sql: secret/register -- name: utils.udf_register_secret - signature: - - [request_id, STRING] - - [key, STRING] - func_type: SECURE - return_type: TEXT - options: NOT NULL STRICT IMMUTABLE - sql: | - SELECT - _utils.UDF_REGISTER_SECRET(REQUEST_ID, _utils.UDF_WHOAMI(), KEY) - name: utils.udf_hex_to_int signature: @@ -82,42 +44,5 @@ try_hex_decode_string(hex), '[\x00-\x1F\x7F-\x9F\xAD]', '', 1)) -{# - LIVE SCHEMA -#} -- name: _live.udf_api - signature: - - [method, STRING] - - [url, STRING] - - [headers, OBJECT] - - [DATA, OBJECT] - - [user_id, STRING] - - [SECRET, STRING] - return_type: VARIANT - func_type: SECURE EXTERNAL - api_integration: '{{ var("API_INTEGRATION") }}' - options: NOT NULL STRICT - sql: udf_api -- name: live.udf_api - signature: - - [method, STRING] - - [url, STRING] - - [headers, OBJECT] - - [data, OBJECT] - - [secret_name, STRING] - return_type: VARIANT - func_type: SECURE - options: NOT NULL STRICT VOLATILE - sql: | - SELECT - _live.UDF_API( - method, - url, - headers, - data, - _utils.UDF_WHOAMI(), - secret_name - ) - {% endmacro %} diff --git a/macros/streamline/streamline_udfs.sql b/macros/streamline/streamline_udfs.sql deleted file mode 100644 index 4381de6..0000000 --- a/macros/streamline/streamline_udfs.sql +++ /dev/null @@ -1,22 +0,0 @@ -{% macro create_udf_introspect( - drop_ = False - ) %} - {% set name_ = 'silver.udf_introspect' %} - {% set signature = [('json', 'variant')] %} - {% set return_type = 'text' %} - {% set sql_ = construct_api_route("introspect") %} - {% if not drop_ %} - {{ create_sql_function( - name_ = name_, - signature = signature, - return_type = return_type, - sql_ = sql_, - api_integration = var("API_INTEGRATION") - ) }} - {% else %} - {{ drop_function( - name_, - signature = signature, - ) }} - {% endif %} -{% endmacro %} diff --git a/models/sources.yml b/models/sources.yml index 6267a2e..3513a51 100644 --- a/models/sources.yml +++ b/models/sources.yml @@ -2,7 +2,7 @@ version: 2 sources: - name: crosschain - database: "{{ 'crosschain' if target.database == 'LIVEQUERY' else 'crosschain_dev' }}" + database: "{{ 'crosschain' if target.database == 'REFERENCE' else 'crosschain_dev' }}" schema: core tables: - name: dim_date_hours \ No newline at end of file diff --git a/profiles.yml b/profiles.yml index 7781bd2..e881083 100644 --- a/profiles.yml +++ b/profiles.yml @@ -1,4 +1,4 @@ -livequery: +reference: target: prod outputs: dev: From 8103f79fff43d1a436bd23bc8fa5f0dc3151a17f Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 11:19:12 -0600 Subject: [PATCH 02/23] docs --- .github/workflows/dbt_run_adhoc.yml | 5 +- data/verified_api_endpoints_20230221.csv | 12 -- .../doc_descriptions/general/__overview__.md | 117 +++--------------- 3 files changed, 15 insertions(+), 119 deletions(-) delete mode 100644 data/verified_api_endpoints_20230221.csv diff --git a/.github/workflows/dbt_run_adhoc.yml b/.github/workflows/dbt_run_adhoc.yml index b116e3e..b6a0e0b 100644 --- a/.github/workflows/dbt_run_adhoc.yml +++ b/.github/workflows/dbt_run_adhoc.yml @@ -48,9 +48,6 @@ jobs: runs-on: ubuntu-latest environment: name: workflow_${{ inputs.environment }} - strategy: - matrix: - command: ${{fromJson(inputs.dbt_command)}} steps: - 
uses: actions/checkout@v3 @@ -65,4 +62,4 @@ jobs: dbt deps - name: Run DBT Jobs run: | - ${{ matrix.command }} \ No newline at end of file + ${{ inputs.dbt_command }} \ No newline at end of file diff --git a/data/verified_api_endpoints_20230221.csv b/data/verified_api_endpoints_20230221.csv deleted file mode 100644 index c50c94f..0000000 --- a/data/verified_api_endpoints_20230221.csv +++ /dev/null @@ -1,12 +0,0 @@ -Project,Endpoint,Type,Documentation -Snapshot,https://hub.snapshot.org/graphql,GraphQL,https://docs.snapshot.org/graphql-api -Defillama,https://api.llama.fi/,REST,https://defillama.com/docs/api -Defillama,https://yields.llama.fi/,REST,https://defillama.com/docs/api -Defillama,https://stablecoins.llama.fi/,REST,https://defillama.com/docs/api -Defillama,https://bridges.llama.fi/,REST,https://defillama.com/docs/api -Defillama,https://coins.llama.fi/,REST,https://defillama.com/docs/api -zkSync,https://api.zksync.io/api/v0.2/,REST,https://docs.zksync.io/apiv02-docs/ -DeepNFT Value,https://api.deepnftvalue.com/v1,REST,https://deepnftvalue.readme.io/reference/getting-started-with-deepnftvalue-api -Zapper,https://api.zapper.fi/v2/,REST,https://api.zapper.fi/api/static/index.html#/Apps/AppsController_getApps -Helius,https://api.helius.xyz,REST,https://docs.helius.xyz/introduction/why-helius -Stargaze Name Service,https://rest.stargaze-apis.com,REST,https://github.com/public-awesome/names/blob/main/API.md \ No newline at end of file diff --git a/models/doc_descriptions/general/__overview__.md b/models/doc_descriptions/general/__overview__.md index dafbf45..ab626b3 100644 --- a/models/doc_descriptions/general/__overview__.md +++ b/models/doc_descriptions/general/__overview__.md @@ -1,15 +1,15 @@ {% docs __overview__ %} -# Welcome to the Flipside Crypto LiveQuery Models Documentation! +# Welcome to the Flipside Crypto Reference Models Documentation! ## **What does this documentation cover?** -The documentation included here details the design of the LiveQuery functions available via [Flipside Crypto](https://flipsidecrypto.xyz/). For more information on how these functions are built, please see [the github repository.](https://github.com/FlipsideCrypto/livequery-models) +The documentation included here details the design of the Reference functions available via [Flipside Crypto](https://flipsidecrypto.xyz/). For more information on how these functions are built, please see [the github repository.](https://github.com/FlipsideCrypto/reference-models) ### **Overview of Available Functions** #### **UTILS Functions** -- `utils.hex_to_int`: Use this UDF to transform any hex string to integer +- `utils.udf_hex_to_int`: Use this UDF to transform any hex string to integer ``` ex: Curve Swaps @@ -24,111 +24,23 @@ The documentation included here details the design of the LiveQuery functions av '0xd013ca23e77a65003c2c659c5442c00c805371b7fc1ebd4c206c41d1536bd90b' ) ``` -- `utils.hex_encode_function` (coming soon)(Function VARCHAR): Use this UDF to hex encode any string +- `utils.udf_hex_to_string`: Use this UDF to transform any hexadecimal string to a regular string. The function removes any non-printable or control characters from the resulting string. 
``` - ex: Decimals Function Signature + ex: Token Names + WITH base AS ( SELECT - `decimals` AS function_name, - utils.hex_encode_function(`decimals()`) :: STRING AS text_sig, - LEFT(text_sig,10) AS function_sig, - '0x313ce567' AS expected_sig - ``` -- `utils.evm_decode_logs` (coming soon) -- `utils.udf_register_secret` - -#### **LIVE Functions & Examples** - -- `live.udf_api`(Method STRING, URL STRING, Headers OBJECT, Data OBJECT): Use this UDF to make a GET or POST request on any API - ``` - ex: Defillama GET request -> working with the output (JSON flatten) - - WITH chain_base AS ( - SELECT - ethereum.streamline.udf_api( - 'GET','https://api.llama.fi/chains',{},{} - ) AS read - ) - - SELECT - VALUE:chainId::STRING AS chain_id, - VALUE:name::STRING AS chain, - VALUE:tokenSymbol::STRING AS token_symbol - FROM chain_base, - LATERAL FLATTEN (input=> read:data) - - ex: Solana Token Metadata - - SELECT - live.udf_api( - 'GET', - 'https://public-api.solscan.io/token/meta?tokenAddress=SPraYi59a21jEhqvPBbWuwmjA4vdTaSLbiRTefcHJSR', - { }, - { } - ); - - ex: Running with multiple token addresses at the same time - - WITH solana_addresses AS ( - SELECT - 'SPraYi59a21jEhqvPBbWuwmjA4vdTaSLbiRTefcHJSR' AS address - UNION - SELECT - '4KbzSz2VF1LCvEaw8viq1335VgWzNjMd8rwQMsCkKHip' - ) - SELECT - live.udf_api( - 'GET', - concat( - 'https://public-api.solscan.io/token/meta?tokenAddress=', - address - ), - { }, - { } + '0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000005452617265202d204368616e74616c20486167656c202d20576f6d656e2773204575726f2032303232202d2032303232205371756164202d20576f6d656e2773204e6174696f6e616c205465616d202d2032303232000000000000000000000000' AS input_token_name ) - FROM - solana_addresses; - ex: Hit Quicknode (see instructions below for how to register an API Key with Flipside securely) - - SELECT - live.udf_api( - 'POST', - concat( - 'http://sample-endpoint-name.network.quiknode.pro/', - '{my_key}' - ), - {}, - { 'method' :'eth_blockNumber', - 'params' :[], - 'id' :1, - 'jsonrpc' :'2.0' }, - 'quicknode' - ); + SELECT + utils.udf_hex_to_string(SUBSTR(input_token_name,(64+1),LEN(input_token_name))) AS output_token_name + FROM base; + + NOTE: The expression 64 + 1 is used in the query to calculate the starting index for the SUBSTR function. In a hexadecimal representation, each character represents 4 bits. Therefore, to skip the first 256 bits in the hexadecimal string, we need to skip the first 64 characters (64 * 4 = 256). In Snowflake, the SUBSTR function expects the starting index to be 1-based rather than 0-based, which is why we add + 1. + ``` -##### **Registering and Using LiveQuery Credentials to Query Quicknode** -With LiveQuery you can safely store encrypted credentials, such as an API key, with Flipside, and query blockchain nodes directly via our SQL interface. Here’s how: -1. Sign up for a free [Quicknode API Account](https://www.quicknode.com/core-api) -2. Navigate to ***Endpoints*** on the left hand side then click the ***Get Started*** tab and ***Copy*** the HTTP Provider Endpoint. Do not adjust the Setup or Security parameters. -3. Visit [Ephit](https://science.flipsidecrypto.xyz/ephit) to obtain an Ephemeral query that will securely link your API Endpoint to Flipside's backend. This will allow you to refer to the URL securely in our application without referencing it or exposing keys directly. -4. Fill out the form and click ***Submit this Credential*** -5. 
Paste the provided query into [Flipside](https://flipside.new) and query your node directly in the app with your submitted Credential (`{my_key}`). - -##### **API Endpoints available for use with LiveQuery:** -Please visit the Flipside discord or open a ticket for questions, concerns or more information. - -- Snapshot: [https://hub.snapshot.org/graphql](https://docs.snapshot.org/graphql-api) -- Defillama: [https://api.llama.fi/](https://defillama.com/docs/api) -- Defillama: [https://yields.llama.fi/](https://defillama.com/docs/api) -- Defillama: [https://stablecoins.llama.fi/](https://defillama.com/docs/api) -- Defillama: [https://bridges.llama.fi/](https://defillama.com/docs/api) -- Defillama: [https://coins.llama.fi/](https://defillama.com/docs/api) -- zkSync: [https://api.zksync.io/api/v0.2/](https://docs.zksync.io/apiv02-docs/) -- DeepNFT Value: [https://api.deepnftvalue.com/v1](https://deepnftvalue.readme.io/reference/getting-started-with-deepnftvalue-api) -- Zapper: [https://api.zapper.fi/v2/](https://api.zapper.fi/api/static/index.html#/Apps/AppsController_getApps) -- Helius: [https://api.helius.xyz](https://docs.helius.xyz/introduction/why-helius) -- Stargaze Name Service: [https://rest.stargaze-apis.com](https://github.com/public-awesome/names/blob/main/API.md) ## **Using dbt docs** ### Navigation @@ -151,9 +63,8 @@ Note that you can also right-click on models to interactively filter and explore ### **More information** - [Flipside](https://flipsidecrypto.xyz/) -- [Velocity](https://app.flipsidecrypto.com/velocity?nav=Discover) - [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials) -- [Github](https://github.com/FlipsideCrypto/external-models) +- [Github](https://github.com/FlipsideCrypto/reference-models) - [What is dbt?](https://docs.getdbt.com/docs/introduction) {% enddocs %} \ No newline at end of file From 5ec0b4b6e38598d4d9e3a4e75710e91531a95a5c Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 12:50:24 -0600 Subject: [PATCH 03/23] docs example --- models/doc_descriptions/general/__overview__.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/models/doc_descriptions/general/__overview__.md b/models/doc_descriptions/general/__overview__.md index ab626b3..11bed62 100644 --- a/models/doc_descriptions/general/__overview__.md +++ b/models/doc_descriptions/general/__overview__.md @@ -34,10 +34,10 @@ The documentation included here details the design of the Reference functions av ) SELECT - utils.udf_hex_to_string(SUBSTR(input_token_name,(64+1),LEN(input_token_name))) AS output_token_name + utils.udf_hex_to_string(SUBSTR(input_token_name,(64*2+3),LEN(input_token_name))) AS output_token_name FROM base; - NOTE: The expression 64 + 1 is used in the query to calculate the starting index for the SUBSTR function. In a hexadecimal representation, each character represents 4 bits. Therefore, to skip the first 256 bits in the hexadecimal string, we need to skip the first 64 characters (64 * 4 = 256). In Snowflake, the SUBSTR function expects the starting index to be 1-based rather than 0-based, which is why we add + 1. + NOTE: The expression 64 * 2 + 3 in the query navigates to the 131st character of the hexadecimal string returned by an EVM blockchain contract's function, skipping metadata and adjusting for Snowflake's 1-based indexing. Keep in mind that the exact start of relevant data may vary between different contracts and functions. 
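    -- A worked breakdown of the 64 * 2 + 3 arithmetic (an illustrative sketch,
    -- assuming the standard ABI layout for a dynamic string return value):
    --   2 characters for the leading '0x'
    --   + 64 characters for the 32-byte offset word
    --   + 64 characters for the 32-byte length word
    --   = 130 characters of metadata, so the string data begins at
    --   position 131 once Snowflake's 1-based SUBSTR indexing is applied.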
``` From afde74688043952f7dac072d7f90e4174fa4417f Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 13:27:03 -0600 Subject: [PATCH 04/23] docs --- models/doc_descriptions/general/__overview__.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/doc_descriptions/general/__overview__.md b/models/doc_descriptions/general/__overview__.md index 11bed62..a7214af 100644 --- a/models/doc_descriptions/general/__overview__.md +++ b/models/doc_descriptions/general/__overview__.md @@ -24,7 +24,7 @@ The documentation included here details the design of the Reference functions av '0xd013ca23e77a65003c2c659c5442c00c805371b7fc1ebd4c206c41d1536bd90b' ) ``` -- `utils.udf_hex_to_string`: Use this UDF to transform any hexadecimal string to a regular string. The function removes any non-printable or control characters from the resulting string. +- `utils.udf_hex_to_string`: Use this UDF to transform any hexadecimal string to a regular string, removing any non-printable or control characters from the resulting string. ``` ex: Token Names From 99ef480bc749b3e57d82471126a8cd9307bc0a70 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 13:33:23 -0600 Subject: [PATCH 05/23] profile --- profiles.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/profiles.yml b/profiles.yml index e881083..a75e6e5 100644 --- a/profiles.yml +++ b/profiles.yml @@ -13,7 +13,7 @@ reference: schema: SILVER threads: 4 client_session_keep_alive: False - query_tag: livequery_curator + query_tag: reference_curator prod: type: snowflake account: "{{ env_var('ACCOUNT') }}" @@ -26,4 +26,4 @@ reference: schema: SILVER threads: 4 client_session_keep_alive: False - query_tag: livequery_curator \ No newline at end of file + query_tag: reference_curator \ No newline at end of file From 2dc85270018b97d96b4c56ebddd00f38fe3936c1 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 15:38:21 -0600 Subject: [PATCH 06/23] macro test --- macros/hex_to_string_udf.sql | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 macros/hex_to_string_udf.sql diff --git a/macros/hex_to_string_udf.sql b/macros/hex_to_string_udf.sql new file mode 100644 index 0000000..b25bfd3 --- /dev/null +++ b/macros/hex_to_string_udf.sql @@ -0,0 +1,12 @@ +{% macro create_udf_hex_to_string(schema) %} +CREATE OR REPLACE FUNCTION {{ schema }}.udf_hex_to_string(hex STRING) + RETURNS TEXT + LANGUAGE SQL + STRICT IMMUTABLE AS +$$ + SELECT + LTRIM(regexp_replace( + try_hex_decode_string(hex), + '[\x00-\x1F\x7F-\x9F\xAD]', '', 1)) +$$; +{% endmacro %} \ No newline at end of file From 1b95ebd4b36a6e9844fbe53a96f0bed11d53624d Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 16:01:45 -0600 Subject: [PATCH 07/23] macro --- macros/create_udfs.sql | 1 - macros/hex_to_string_udf.sql | 12 ------------ 2 files changed, 13 deletions(-) delete mode 100644 macros/hex_to_string_udf.sql diff --git a/macros/create_udfs.sql b/macros/create_udfs.sql index bdfa5da..b20d5ed 100644 --- a/macros/create_udfs.sql +++ b/macros/create_udfs.sql @@ -6,7 +6,6 @@ {% set udfs = fromyaml(name) %} {% set sql %} CREATE schema if NOT EXISTS silver; - CREATE schema if NOT EXISTS beta; CREATE schema if NOT EXISTS utils; CREATE schema if NOT EXISTS _utils; CREATE schema if NOT EXISTS _live; diff --git a/macros/hex_to_string_udf.sql b/macros/hex_to_string_udf.sql deleted file mode 100644 index b25bfd3..0000000 --- a/macros/hex_to_string_udf.sql +++ /dev/null @@ -1,12 +0,0 @@ -{% macro 
create_udf_hex_to_string(schema) %} -CREATE OR REPLACE FUNCTION {{ schema }}.udf_hex_to_string(hex STRING) - RETURNS TEXT - LANGUAGE SQL - STRICT IMMUTABLE AS -$$ - SELECT - LTRIM(regexp_replace( - try_hex_decode_string(hex), - '[\x00-\x1F\x7F-\x9F\xAD]', '', 1)) -$$; -{% endmacro %} \ No newline at end of file From 43de0831d6ef8b0560be82c6c5c59b4356c347a5 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 16:06:31 -0600 Subject: [PATCH 08/23] reverted macro --- macros/hex_to_str_udf.sql | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 macros/hex_to_str_udf.sql diff --git a/macros/hex_to_str_udf.sql b/macros/hex_to_str_udf.sql new file mode 100644 index 0000000..7153cc0 --- /dev/null +++ b/macros/hex_to_str_udf.sql @@ -0,0 +1,5 @@ +{% macro udf_hex_to_string(hex_string) %} + LTRIM(regexp_replace( + try_hex_decode_string({{ hex_string }}), + '[\x00-\x1F\x7F-\x9F\xAD]', '', 1)) +{% endmacro %} \ No newline at end of file From 28ef0ab1d21d3abc9220a003f6d3286f8e5f77ff Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 16:50:25 -0600 Subject: [PATCH 09/23] test create udf --- macros/streamline/configs.yaml.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/macros/streamline/configs.yaml.sql b/macros/streamline/configs.yaml.sql index 471371e..2a170c2 100644 --- a/macros/streamline/configs.yaml.sql +++ b/macros/streamline/configs.yaml.sql @@ -4,7 +4,7 @@ UTILITY SCHEMA #} -- name: utils.udf_hex_to_int +{# - name: utils.udf_hex_to_int signature: - [hex, STRING] return_type: TEXT @@ -28,7 +28,7 @@ RUNTIME_VERSION = '3.8' HANDLER = 'hex_to_int' sql: | - {{ python_udf_hex_to_int_with_encoding() | indent(4) }} + {{ python_udf_hex_to_int_with_encoding() | indent(4) }} #} - name: utils.udf_hex_to_string signature: From 1602da7fdb4e231f001fc0c9c05bb5098f0ff7f7 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 16:59:00 -0600 Subject: [PATCH 10/23] reverted test --- macros/streamline/configs.yaml.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/macros/streamline/configs.yaml.sql b/macros/streamline/configs.yaml.sql index 2a170c2..471371e 100644 --- a/macros/streamline/configs.yaml.sql +++ b/macros/streamline/configs.yaml.sql @@ -4,7 +4,7 @@ UTILITY SCHEMA #} -{# - name: utils.udf_hex_to_int +- name: utils.udf_hex_to_int signature: - [hex, STRING] return_type: TEXT @@ -28,7 +28,7 @@ RUNTIME_VERSION = '3.8' HANDLER = 'hex_to_int' sql: | - {{ python_udf_hex_to_int_with_encoding() | indent(4) }} #} + {{ python_udf_hex_to_int_with_encoding() | indent(4) }} - name: utils.udf_hex_to_string signature: From f1b129935b5989a963e264e01eb53404b76a5015 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 17:16:19 -0600 Subject: [PATCH 11/23] ref on functions --- macros/streamline/configs.yaml.sql | 4 ++-- macros/streamline/utils.sql | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/macros/streamline/configs.yaml.sql b/macros/streamline/configs.yaml.sql index 471371e..2f9d8d6 100644 --- a/macros/streamline/configs.yaml.sql +++ b/macros/streamline/configs.yaml.sql @@ -15,7 +15,7 @@ RUNTIME_VERSION = '3.8' HANDLER = 'hex_to_int' sql: | - {{ python_hex_to_int() | indent(4) }} + {{ reference_models.python_hex_to_int() | indent(4) }} - name: utils.udf_hex_to_int signature: - [encoding, STRING] @@ -28,7 +28,7 @@ RUNTIME_VERSION = '3.8' HANDLER = 'hex_to_int' sql: | - {{ python_udf_hex_to_int_with_encoding() | indent(4) }} + {{ reference_models.python_udf_hex_to_int_with_encoding() | indent(4) }} 
- name: utils.udf_hex_to_string signature: diff --git a/macros/streamline/utils.sql b/macros/streamline/utils.sql index 04e28e5..37dc6cf 100644 --- a/macros/streamline/utils.sql +++ b/macros/streamline/utils.sql @@ -66,7 +66,7 @@ {% set func_type = config ["func_type"] %} {% if not drop_ -%} - {{ create_sql_function( + {{ reference_models.create_sql_function( name_ = name_, signature = signature, return_type = return_type, From 84a954d74eec86386d4000faddec156cc1508033 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 17:18:58 -0600 Subject: [PATCH 12/23] test macros --- analysis/test_create_or_drop_function_from_config.sql | 8 -------- macros/streamline/utils.sql | 8 ++++---- 2 files changed, 4 insertions(+), 12 deletions(-) delete mode 100644 analysis/test_create_or_drop_function_from_config.sql diff --git a/analysis/test_create_or_drop_function_from_config.sql b/analysis/test_create_or_drop_function_from_config.sql deleted file mode 100644 index 32168bf..0000000 --- a/analysis/test_create_or_drop_function_from_config.sql +++ /dev/null @@ -1,8 +0,0 @@ -{%- set name -%} - {{- udf_configs() -}} -{% endset %} -{% set udfs = fromyaml(name) %} -{%- for udf in udfs -%} -{{- create_or_drop_function_from_config(udf, drop_=True) -}} -{{- create_or_drop_function_from_config(udf, drop_=False) -}} -{% endfor %} diff --git a/macros/streamline/utils.sql b/macros/streamline/utils.sql index 37dc6cf..27acc26 100644 --- a/macros/streamline/utils.sql +++ b/macros/streamline/utils.sql @@ -2,7 +2,7 @@ func_name, signature ) %} - DROP FUNCTION IF EXISTS {{ func_name }}({{ compile_signature(signature, drop_ = True) }}); + DROP FUNCTION IF EXISTS {{ func_name }}({{ reference_models.compile_signature(signature, drop_ = True) }}); {% endmacro %} {%- macro construct_api_route(route) -%} @@ -35,7 +35,7 @@ func_type = none ) %} CREATE OR REPLACE {{ func_type }} FUNCTION {{ name_ }}( - {{- compile_signature(signature) }} + {{- reference_models.compile_signature(signature) }} ) COPY GRANTS RETURNS {{ return_type }} @@ -44,7 +44,7 @@ {% endif %} {%- if api_integration -%} api_integration = {{ api_integration }} - AS {{ construct_api_route(sql_) ~ ";" }} + AS {{ reference_models.construct_api_route(sql_) ~ ";" }} {% else -%} AS $$ @@ -76,7 +76,7 @@ func_type = func_type ) }} {%- else -%} - {{ drop_function( + {{ reference_models.drop_function( name_, signature = signature, ) }} From 408192bf65fc9bbb640216a4e577e94dc6cd3138 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 17:25:28 -0600 Subject: [PATCH 13/23] create udfs --- macros/create_udfs.sql | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/macros/create_udfs.sql b/macros/create_udfs.sql index b20d5ed..43d4e60 100644 --- a/macros/create_udfs.sql +++ b/macros/create_udfs.sql @@ -1,17 +1,13 @@ {% macro create_udfs(drop_=False) %} {% if var("UPDATE_UDFS_AND_SPS") %} {% set name %} - {{- udf_configs() -}} + {{- reference_models.udf_configs() -}} {% endset %} {% set udfs = fromyaml(name) %} {% set sql %} - CREATE schema if NOT EXISTS silver; CREATE schema if NOT EXISTS utils; - CREATE schema if NOT EXISTS _utils; - CREATE schema if NOT EXISTS _live; - CREATE schema if NOT EXISTS live; {%- for udf in udfs -%} - {{- create_or_drop_function_from_config(udf, drop_=drop_) -}} + {{- reference_models.create_or_drop_function_from_config(udf, drop_=drop_) -}} {% endfor %} {% endset %} {% do run_query(sql) %} From adda21a75f2ba7eb1a94bb5c43ec3d119578502b Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 17:29:43 
-0600 Subject: [PATCH 14/23] dynamic schema --- macros/create_udfs.sql | 6 +++--- macros/streamline/configs.yaml.sql | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/macros/create_udfs.sql b/macros/create_udfs.sql index 43d4e60..b6af668 100644 --- a/macros/create_udfs.sql +++ b/macros/create_udfs.sql @@ -1,11 +1,11 @@ -{% macro create_udfs(drop_=False) %} +{% macro create_udfs(drop_=False,schema="utils") %} {% if var("UPDATE_UDFS_AND_SPS") %} {% set name %} - {{- reference_models.udf_configs() -}} + {{- reference_models.udf_configs(schema) -}} {% endset %} {% set udfs = fromyaml(name) %} {% set sql %} - CREATE schema if NOT EXISTS utils; + CREATE schema if NOT EXISTS {{ schema }}; {%- for udf in udfs -%} {{- reference_models.create_or_drop_function_from_config(udf, drop_=drop_) -}} {% endfor %} diff --git a/macros/streamline/configs.yaml.sql b/macros/streamline/configs.yaml.sql index 2f9d8d6..7395d08 100644 --- a/macros/streamline/configs.yaml.sql +++ b/macros/streamline/configs.yaml.sql @@ -1,10 +1,10 @@ -{% macro udf_configs() %} +{% macro udf_configs(schema) %} {# UTILITY SCHEMA #} -- name: utils.udf_hex_to_int +- name: {{ schema }}.udf_hex_to_int signature: - [hex, STRING] return_type: TEXT @@ -16,7 +16,7 @@ HANDLER = 'hex_to_int' sql: | {{ reference_models.python_hex_to_int() | indent(4) }} -- name: utils.udf_hex_to_int +- name: {{ schema }}.udf_hex_to_int signature: - [encoding, STRING] - [hex, STRING] @@ -30,7 +30,7 @@ sql: | {{ reference_models.python_udf_hex_to_int_with_encoding() | indent(4) }} -- name: utils.udf_hex_to_string +- name: {{ schema }}.udf_hex_to_string signature: - [hex, STRING] return_type: TEXT From 0ffa7c53557a58f49fe6e35f848a6b39802b827d Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 18:05:09 -0600 Subject: [PATCH 15/23] removed unnecessary files and renamed --- .github/workflows/dbt_run_dev_refresh.yml | 44 ------ dbt_project.yml | 15 +-- macros/create_sps.sql | 7 +- macros/create_udfs.sql | 4 +- macros/hex_to_str_udf.sql | 5 - macros/run_sp_create_prod_clone.sql | 10 -- macros/sp_create_prod_clone.sql | 44 ------ macros/streamline/configs.yaml.sql | 4 +- macros/streamline/utils.sql | 10 +- macros/tags/add_database_or_schema_tags.sql | 3 - macros/tags/snowflake_tagging.sql | 127 ------------------ .../doc_descriptions/general/__overview__.md | 6 +- profiles.yml | 4 +- 13 files changed, 17 insertions(+), 266 deletions(-) delete mode 100644 .github/workflows/dbt_run_dev_refresh.yml delete mode 100644 macros/hex_to_str_udf.sql delete mode 100644 macros/run_sp_create_prod_clone.sql delete mode 100644 macros/sp_create_prod_clone.sql delete mode 100644 macros/tags/add_database_or_schema_tags.sql delete mode 100644 macros/tags/snowflake_tagging.sql diff --git a/.github/workflows/dbt_run_dev_refresh.yml b/.github/workflows/dbt_run_dev_refresh.yml deleted file mode 100644 index ea29450..0000000 --- a/.github/workflows/dbt_run_dev_refresh.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: dbt_run_dev_refresh -run-name: dbt_run_dev_refresh - -on: - workflow_dispatch: - schedule: - # Runs "at 9:00 UTC" (see https://crontab.guru) - - cron: '0 9 * * *' - -env: - DBT_PROFILES_DIR: ./ - - ACCOUNT: "${{ vars.ACCOUNT }}" - ROLE: "${{ vars.ROLE }}" - USER: "${{ vars.USER }}" - PASSWORD: "${{ secrets.PASSWORD }}" - REGION: "${{ vars.REGION }}" - DATABASE: "${{ vars.DATABASE }}" - WAREHOUSE: "${{ vars.WAREHOUSE }}" - SCHEMA: "${{ vars.SCHEMA }}" - -concurrency: - group: ${{ github.workflow }} - -jobs: - run_dbt_jobs: - runs-on: ubuntu-latest - 
environment: - name: workflow_prod - - steps: - - uses: actions/checkout@v3 - - - uses: actions/setup-python@v1 - with: - python-version: "3.7.x" - - - name: install dependencies - run: | - pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click - dbt deps - - name: Run DBT Jobs - run: | - dbt run-operation run_sp_create_prod_clone \ No newline at end of file diff --git a/dbt_project.yml b/dbt_project.yml index fb47932..4a7b7aa 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -1,7 +1,7 @@ # Name your project! Project names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: "reference_models" +name: "fsc_utils" version: "1.0.0" config-version: 2 @@ -27,13 +27,6 @@ clean-targets: # directories to be removed by `dbt clean` tests: +store_failures: true # all tests -on-run-start: - - "{{ create_sps() }}" - - "{{ create_udfs() }}" - -on-run-end: - - "{{ apply_meta_as_tags(results) }}" - # Configuring models # Full documentation: https://docs.getdbt.com/docs/configuring-models @@ -44,10 +37,4 @@ on-run-end: vars: "dbt_date:time_zone": GMT UPDATE_UDFS_AND_SPS: false - UPDATE_SNOWFLAKE_TAGS: true - STREAMLINE_INVOKE_STREAMS: False - STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False - REST_API_ID_PROD: hn8uqhku77 - REST_API_ID_DEV: hn8uqhku77 - API_INTEGRATION: AWS_LIVE_QUERY{{ '_DEV' if target.name != 'prod' else '' }} AWS_REGION: us-east-1 diff --git a/macros/create_sps.sql b/macros/create_sps.sql index ea91fdd..bbf917e 100644 --- a/macros/create_sps.sql +++ b/macros/create_sps.sql @@ -1,8 +1,5 @@ -{% macro create_sps() %} +{# {% macro create_sps() %} {% if var("UPDATE_UDFS_AND_SPS") %} - {% if target.database == 'REFERENCE' %} - CREATE schema IF NOT EXISTS _internal; - {{ sp_create_prod_clone('_internal') }}; {% endif %} {% endif %} -{% endmacro %} +{% endmacro %} #} diff --git a/macros/create_udfs.sql b/macros/create_udfs.sql index b6af668..77c8adc 100644 --- a/macros/create_udfs.sql +++ b/macros/create_udfs.sql @@ -1,13 +1,13 @@ {% macro create_udfs(drop_=False,schema="utils") %} {% if var("UPDATE_UDFS_AND_SPS") %} {% set name %} - {{- reference_models.udf_configs(schema) -}} + {{- fsc_utils.udf_configs(schema) -}} {% endset %} {% set udfs = fromyaml(name) %} {% set sql %} CREATE schema if NOT EXISTS {{ schema }}; {%- for udf in udfs -%} - {{- reference_models.create_or_drop_function_from_config(udf, drop_=drop_) -}} + {{- fsc_utils.create_or_drop_function_from_config(udf, drop_=drop_) -}} {% endfor %} {% endset %} {% do run_query(sql) %} diff --git a/macros/hex_to_str_udf.sql b/macros/hex_to_str_udf.sql deleted file mode 100644 index 7153cc0..0000000 --- a/macros/hex_to_str_udf.sql +++ /dev/null @@ -1,5 +0,0 @@ -{% macro udf_hex_to_string(hex_string) %} - LTRIM(regexp_replace( - try_hex_decode_string({{ hex_string }}), - '[\x00-\x1F\x7F-\x9F\xAD]', '', 1)) -{% endmacro %} \ No newline at end of file diff --git a/macros/run_sp_create_prod_clone.sql b/macros/run_sp_create_prod_clone.sql deleted file mode 100644 index 19d9e7e..0000000 --- a/macros/run_sp_create_prod_clone.sql +++ /dev/null @@ -1,10 +0,0 @@ -{% macro run_sp_create_prod_clone() %} - {% set clone_query %} - call reference._internal.create_prod_clone( - 'reference', - 'reference_dev', - 'reference_dev_owner' - ); -{% endset %} - {% do run_query(clone_query) %} -{% endmacro %} diff --git a/macros/sp_create_prod_clone.sql b/macros/sp_create_prod_clone.sql deleted file mode 100644 index 
20ee897..0000000 --- a/macros/sp_create_prod_clone.sql +++ /dev/null @@ -1,44 +0,0 @@ -{% macro sp_create_prod_clone(target_schema) -%} - -create or replace procedure {{ target_schema }}.create_prod_clone(source_db_name string, destination_db_name string, role_name string) -returns boolean -language javascript -execute as caller -as -$$ - snowflake.execute({sqlText: `BEGIN TRANSACTION;`}); - try { - snowflake.execute({sqlText: `CREATE OR REPLACE DATABASE ${DESTINATION_DB_NAME} CLONE ${SOURCE_DB_NAME}`}); - snowflake.execute({sqlText: `DROP SCHEMA IF EXISTS ${DESTINATION_DB_NAME}._INTERNAL`}); /* this only needs to be in prod */ - - snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL SCHEMAS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`}); - snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`}); - snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`}); - snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL VIEWS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`}); - snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL STAGES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`}); - snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL TABLES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`}); - snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`}); - snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`}); - snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE VIEWS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`}); - snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE STAGES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`}); - snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE TABLES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`}); - - snowflake.execute({sqlText: `GRANT OWNERSHIP ON DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`}) - - var existing_tags = snowflake.execute({sqlText: `SHOW TAGS IN DATABASE ${DESTINATION_DB_NAME};`}); - while (existing_tags.next()) { - var schema = existing_tags.getColumnValue(4); - var tag_name = existing_tags.getColumnValue(2) - snowflake.execute({sqlText: `GRANT OWNERSHIP ON TAG ${DESTINATION_DB_NAME}.${schema}.${tag_name} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`}); - } - - snowflake.execute({sqlText: `COMMIT;`}); - } catch (err) { - snowflake.execute({sqlText: `ROLLBACK;`}); - throw(err); - } - - return true -$$ - -{%- endmacro %} \ No newline at end of file diff --git a/macros/streamline/configs.yaml.sql b/macros/streamline/configs.yaml.sql index 7395d08..7e09ec1 100644 --- a/macros/streamline/configs.yaml.sql +++ b/macros/streamline/configs.yaml.sql @@ -15,7 +15,7 @@ RUNTIME_VERSION = '3.8' HANDLER = 'hex_to_int' sql: | - {{ reference_models.python_hex_to_int() | indent(4) }} + {{ fsc_utils.python_hex_to_int() | indent(4) }} - name: {{ schema }}.udf_hex_to_int signature: - [encoding, STRING] @@ -28,7 +28,7 @@ RUNTIME_VERSION = '3.8' HANDLER = 'hex_to_int' sql: | - {{ reference_models.python_udf_hex_to_int_with_encoding() | indent(4) }} + {{ fsc_utils.python_udf_hex_to_int_with_encoding() | indent(4) }} - name: {{ schema }}.udf_hex_to_string signature: diff --git 
a/macros/streamline/utils.sql b/macros/streamline/utils.sql index 27acc26..f67b6dd 100644 --- a/macros/streamline/utils.sql +++ b/macros/streamline/utils.sql @@ -2,7 +2,7 @@ func_name, signature ) %} - DROP FUNCTION IF EXISTS {{ func_name }}({{ reference_models.compile_signature(signature, drop_ = True) }}); + DROP FUNCTION IF EXISTS {{ func_name }}({{ fsc_utils.compile_signature(signature, drop_ = True) }}); {% endmacro %} {%- macro construct_api_route(route) -%} @@ -35,7 +35,7 @@ func_type = none ) %} CREATE OR REPLACE {{ func_type }} FUNCTION {{ name_ }}( - {{- reference_models.compile_signature(signature) }} + {{- fsc_utils.compile_signature(signature) }} ) COPY GRANTS RETURNS {{ return_type }} @@ -44,7 +44,7 @@ {% endif %} {%- if api_integration -%} api_integration = {{ api_integration }} - AS {{ reference_models.construct_api_route(sql_) ~ ";" }} + AS {{ fsc_utils.construct_api_route(sql_) ~ ";" }} {% else -%} AS $$ @@ -66,7 +66,7 @@ {% set func_type = config ["func_type"] %} {% if not drop_ -%} - {{ reference_models.create_sql_function( + {{ fsc_utils.create_sql_function( name_ = name_, signature = signature, return_type = return_type, @@ -76,7 +76,7 @@ func_type = func_type ) }} {%- else -%} - {{ reference_models.drop_function( + {{ fsc_utils.drop_function( name_, signature = signature, ) }} diff --git a/macros/tags/add_database_or_schema_tags.sql b/macros/tags/add_database_or_schema_tags.sql deleted file mode 100644 index 7cdc4d2..0000000 --- a/macros/tags/add_database_or_schema_tags.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro add_database_or_schema_tags() %} - {{ set_database_tag_value('BLOCKCHAIN_NAME','LIVEQUERY') }} -{% endmacro %} \ No newline at end of file diff --git a/macros/tags/snowflake_tagging.sql b/macros/tags/snowflake_tagging.sql deleted file mode 100644 index bc25e69..0000000 --- a/macros/tags/snowflake_tagging.sql +++ /dev/null @@ -1,127 +0,0 @@ -{% macro apply_meta_as_tags(results) %} - {% if var("UPDATE_SNOWFLAKE_TAGS") %} - {{ log('apply_meta_as_tags', info=False) }} - {{ log(results, info=False) }} - {% if execute %} - - {%- set tags_by_schema = {} -%} - {% for res in results -%} - {% if res.node.meta.database_tags %} - - {%- set model_database = res.node.database -%} - {%- set model_schema = res.node.schema -%} - {%- set model_schema_full = model_database+'.'+model_schema -%} - {%- set model_alias = res.node.alias -%} - - {% if model_schema_full not in tags_by_schema.keys() %} - {{ log('need to fetch tags for schema '+model_schema_full, info=False) }} - {%- call statement('main', fetch_result=True) -%} - show tags in {{model_database}}.{{model_schema}} - {%- endcall -%} - {%- set _ = tags_by_schema.update({model_schema_full: load_result('main')['table'].columns.get('name').values()|list}) -%} - {{ log('Added tags to cache', info=False) }} - {% else %} - {{ log('already have tag info for schema', info=False) }} - {% endif %} - - {%- set current_tags_in_schema = tags_by_schema[model_schema_full] -%} - {{ log('current_tags_in_schema:', info=False) }} - {{ log(current_tags_in_schema, info=False) }} - {{ log("========== Processing tags for "+model_schema_full+"."+model_alias+" ==========", info=False) }} - - {% set line -%} - node: {{ res.node.unique_id }}; status: {{ res.status }} (message: {{ res.message }}) - node full: {{ res.node}} - meta: {{ res.node.meta}} - materialized: {{ res.node.config.materialized }} - {%- endset %} - {{ log(line, info=False) }} - - {%- call statement('main', fetch_result=True) -%} - select LEVEL,UPPER(TAG_NAME) as TAG_NAME,TAG_VALUE 
from table(information_schema.tag_references_all_columns('{{model_schema}}.{{model_alias}}', 'table')) - {%- endcall -%} - {%- set existing_tags_for_table = load_result('main')['data'] -%} - {{ log('Existing tags for table:', info=False) }} - {{ log(existing_tags_for_table, info=False) }} - - {{ log('--', info=False) }} - {% for table_tag in res.node.meta.database_tags.table %} - - {{ create_tag_if_missing(current_tags_in_schema,table_tag|upper) }} - {% set desired_tag_value = res.node.meta.database_tags.table[table_tag] %} - - {{set_table_tag_value_if_different(model_schema,model_alias,table_tag,desired_tag_value,existing_tags_for_table)}} - {% endfor %} - {{ log("========== Finished processing tags for "+model_alias+" ==========", info=False) }} - {% endif %} - {% endfor %} - {% endif %} - {% endif %} -{% endmacro %} - - -{% macro create_tag_if_missing(all_tag_names,table_tag) %} - {% if table_tag not in all_tag_names %} - {{ log('Creating missing tag '+table_tag, info=False) }} - {%- call statement('main', fetch_result=True) -%} - create tag if not exists silver.{{table_tag}} - {%- endcall -%} - {{ log(load_result('main').data, info=False) }} - {% else %} - {{ log('Tag already exists: '+table_tag, info=False) }} - {% endif %} -{% endmacro %} - -{% macro set_table_tag_value_if_different(model_schema,table_name,tag_name,desired_tag_value,existing_tags) %} - {{ log('Ensuring tag '+tag_name+' has value '+desired_tag_value+' at table level', info=False) }} - {%- set existing_tag_for_table = existing_tags|selectattr('0','equalto','TABLE')|selectattr('1','equalto',tag_name|upper)|list -%} - {{ log('Filtered tags for table:', info=False) }} - {{ log(existing_tag_for_table[0], info=False) }} - {% if existing_tag_for_table|length > 0 and existing_tag_for_table[0][2]==desired_tag_value %} - {{ log('Correct tag value already exists', info=False) }} - {% else %} - {{ log('Setting tag value for '+tag_name+' to value '+desired_tag_value, info=False) }} - {%- call statement('main', fetch_result=True) -%} - alter table {{model_schema}}.{{table_name}} set tag {{tag_name}} = '{{desired_tag_value}}' - {%- endcall -%} - {{ log(load_result('main').data, info=False) }} - {% endif %} -{% endmacro %} - -{% macro set_column_tag_value_if_different(table_name,column_name,tag_name,desired_tag_value,existing_tags) %} - {{ log('Ensuring tag '+tag_name+' has value '+desired_tag_value+' at column level', info=False) }} - {%- set existing_tag_for_column = existing_tags|selectattr('0','equalto','COLUMN')|selectattr('1','equalto',tag_name|upper)|list -%} - {{ log('Filtered tags for column:', info=False) }} - {{ log(existing_tag_for_column[0], info=False) }} - {% if existing_tag_for_column|length > 0 and existing_tag_for_column[0][2]==desired_tag_value %} - {{ log('Correct tag value already exists', info=False) }} - {% else %} - {{ log('Setting tag value for '+tag_name+' to value '+desired_tag_value, info=False) }} - {%- call statement('main', fetch_result=True) -%} - alter table {{table_name}} modify column {{column_name}} set tag {{tag_name}} = '{{desired_tag_value}}' - {%- endcall -%} - {{ log(load_result('main').data, info=False) }} - {% endif %} -{% endmacro %} - -{% macro set_database_tag_value(tag_name,tag_value) %} - {% set query %} - create tag if not exists silver.{{tag_name}} - {% endset %} - {% do run_query(query) %} - {% set query %} - alter database {{target.database}} set tag {{target.database}}.silver.{{tag_name}} = '{{tag_value}}' - {% endset %} - {% do run_query(query) %} -{% endmacro %} - -{% macro 
set_schema_tag_value(target_schema,tag_name,tag_value) %} - {% set query %} - create tag if not exists silver.{{tag_name}} - {% endset %} - {% do run_query(query) %} - {% set query %} - alter schema {{target.database}}.{{target_schema}} set tag {{target.database}}.silver.{{tag_name}} = '{{tag_value}}' - {% endset %} - {% do run_query(query) %} -{% endmacro %} \ No newline at end of file diff --git a/models/doc_descriptions/general/__overview__.md b/models/doc_descriptions/general/__overview__.md index a7214af..e0ddda4 100644 --- a/models/doc_descriptions/general/__overview__.md +++ b/models/doc_descriptions/general/__overview__.md @@ -1,9 +1,9 @@ {% docs __overview__ %} -# Welcome to the Flipside Crypto Reference Models Documentation! +# Welcome to the Flipside Crypto Utility Functions Documentation! ## **What does this documentation cover?** -The documentation included here details the design of the Reference functions available via [Flipside Crypto](https://flipsidecrypto.xyz/). For more information on how these functions are built, please see [the github repository.](https://github.com/FlipsideCrypto/reference-models) +The documentation included here details the design of the utility functions available via [Flipside Crypto](https://flipsidecrypto.xyz/). For more information on how these functions are built, please see [the github repository.](https://github.com/FlipsideCrypto/fsc-utils) ### **Overview of Available Functions** @@ -64,7 +64,7 @@ Note that you can also right-click on models to interactively filter and explore ### **More information** - [Flipside](https://flipsidecrypto.xyz/) - [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials) -- [Github](https://github.com/FlipsideCrypto/reference-models) +- [Github](https://github.com/FlipsideCrypto/fsc-utils) - [What is dbt?](https://docs.getdbt.com/docs/introduction) {% enddocs %} \ No newline at end of file diff --git a/profiles.yml b/profiles.yml index a75e6e5..4888674 100644 --- a/profiles.yml +++ b/profiles.yml @@ -13,7 +13,7 @@ reference: schema: SILVER threads: 4 client_session_keep_alive: False - query_tag: reference_curator + query_tag: fsc_utils_curator prod: type: snowflake account: "{{ env_var('ACCOUNT') }}" @@ -26,4 +26,4 @@ reference: schema: SILVER threads: 4 client_session_keep_alive: False - query_tag: reference_curator \ No newline at end of file + query_tag: fsc_utils_curator \ No newline at end of file From 6ce2230e51706e52d3a2105a324f7d0aad61f967 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 18:20:24 -0600 Subject: [PATCH 16/23] updated readme --- README.md | 103 ++++++++++++------------------------------------------ 1 file changed, 23 insertions(+), 80 deletions(-) diff --git a/README.md b/README.md index 181f6a7..19aa0fa 100644 --- a/README.md +++ b/README.md @@ -1,42 +1,6 @@ -# LiveQuery Models +# Flipside Utility Functions -Dbt repo for managing LiveQuery database. - -## Profile Set Up - -Use the following within profiles.yml - -```yml -livequery: - target: dev - outputs: - dev: - type: snowflake - account: - role: - user: - password: - region: - database: LIVEQUERY_DEV - warehouse: - schema: silver - threads: 12 - client_session_keep_alive: False - query_tag: - prod: - type: snowflake - account: - role: - user: - password: - region: - database: LIVEQUERY_DEV - warehouse: - schema: silver - threads: 12 - client_session_keep_alive: False - query_tag: -``` +Dbt repo for managing the Flipside Utility Functions (FSC_UTILS) dbt package. 
## Variables @@ -58,6 +22,27 @@ dbt run-operation create_udfs --var 'UPDATE_UDFS_AND_SPS": True' --args 'drop_:f dbt run-operation create_udfs --var 'UPDATE_UDFS_AND_SPS": True' --args 'drop_:true' ``` +## Adding the `fsc_utils` dbt package + +The `fsc_utils` dbt package is a centralized repository consisting of various dbt macros and snowflake functions that can be utilized across other repos. + +1. Navigate to the `create_udfs.sql` macro in your respective repo where you want to install the package. +2. Add the following: +``` +{% set name %} +{{- fsc_utils.create_udfs() -}} +{% endset %} +{% do run_query(sql) %} +``` +3. Note: fsc_utils.create_udfs() takes two parameters (drop_=False, schema=utils). Set `drop_` to `True` to drop existing functions or define `schema` for the functions (default set to `utils`). Params not required. +4. Navigate to `packages.yml` in your respective repo. +5. Add the following: +``` +- git: https://github.com/FlipsideCrypto/fsc-utils.git +``` +6. Run `dbt deps` to install the package +7. Run the macro `dbt run-operation create_udfs --var '{"UPDATE_UDFS_AND_SPS":True}'` + ## Resources * Learn more about dbt [in the docs](https://docs.getdbt.com/docs/introduction) @@ -65,45 +50,3 @@ dbt run-operation create_udfs --var 'UPDATE_UDFS_AND_SPS": True' --args 'drop_:t * Join the [chat](https://community.getdbt.com/) on Slack for live discussions and support * Find [dbt events](https://events.getdbt.com) near you * Check out [the blog](https://blog.getdbt.com/) for the latest news on dbt's development and best practices - -## Applying Model Tags - -## Database / Schema level tags - -Database and schema tags are applied via the `add_database_or_schema_tags` macro. These tags are inherited by their downstream objects. To add/modify tags call the appropriate tag set function within the macro. - -```jinja -{{ set_database_tag_value('SOME_DATABASE_TAG_KEY','SOME_DATABASE_TAG_VALUE') }} -{{ set_schema_tag_value('SOME_SCHEMA_TAG_KEY','SOME_SCHEMA_TAG_VALUE') }} -``` - -### Model tags - -To add/update a model's snowflake tags, add/modify the `meta` model property under `config` . Only table level tags are supported at this time via DBT. - -```jinja -{{ config( - ... - meta={ - 'database_tags':{ - 'table': { - 'PURPOSE': 'SOME_PURPOSE' - } - } - }, - ... -) }} -``` - -By default, model tags are pushed to Snowflake on each load. You can disable this by setting the `UPDATE_SNOWFLAKE_TAGS` project variable to `False` during a run. - -```sh -dbt run --var '{"UPDATE_SNOWFLAKE_TAGS":False}' -s models/core/core__fact_blocks.sql -``` - -### Querying for existing tags on a model in snowflake - -```sql -select * -from table(livequery.information_schema.tag_references('livequery.core.fact_blocks', 'table')); -``` From a3c080c582731a0481355835c94c0f899d6f06af Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 18:23:26 -0600 Subject: [PATCH 17/23] profile --- dbt_project.yml | 2 +- profiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dbt_project.yml b/dbt_project.yml index 4a7b7aa..76dbb16 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -6,7 +6,7 @@ version: "1.0.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. -profile: "reference" +profile: "fsc_utils" # These configurations specify where dbt should look for different types of files. 
# The `source-paths` config, for example, states that models in this project can be diff --git a/profiles.yml b/profiles.yml index 4888674..ee008ef 100644 --- a/profiles.yml +++ b/profiles.yml @@ -1,4 +1,4 @@ -reference: +fsc_utils: target: prod outputs: dev: From ef8796e158c7106e4e68e4ec7de6dd7111f8fb84 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 18:24:15 -0600 Subject: [PATCH 18/23] sources --- models/sources.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/sources.yml b/models/sources.yml index 3513a51..18c2432 100644 --- a/models/sources.yml +++ b/models/sources.yml @@ -2,7 +2,7 @@ version: 2 sources: - name: crosschain - database: "{{ 'crosschain' if target.database == 'REFERENCE' else 'crosschain_dev' }}" + database: crosschain schema: core tables: - name: dim_date_hours \ No newline at end of file From a3b2e173ca689381b68a23d40e2e7f9d4347dee6 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 18:25:53 -0600 Subject: [PATCH 19/23] minor update to readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 19aa0fa..72b952b 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ The `fsc_utils` dbt package is a centralized repository consisting of various db {% endset %} {% do run_query(sql) %} ``` -3. Note: fsc_utils.create_udfs() takes two parameters (drop_=False, schema=utils). Set `drop_` to `True` to drop existing functions or define `schema` for the functions (default set to `utils`). Params not required. +3. Note: fsc_utils.create_udfs() takes two parameters (drop_=False, schema="utils"). Set `drop_` to `True` to drop existing functions or define `schema` for the functions (default set to `utils`). Params not required. 4. Navigate to `packages.yml` in your respective repo. 5. 
Add the following: ``` From 07fd0c5677ef93831e7e1374d93c0f53f6c8f588 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 19:39:27 -0600 Subject: [PATCH 20/23] added json rpc udf and removed unnecessary files --- .github/workflows/dbt_docs_update.yml | 68 ------------------ .github/workflows/dbt_run_adhoc.yml | 65 ----------------- README.md | 36 ++++++++++ macros/streamline/configs.yaml.sql | 52 ++++++++++++++ macros/streamline/functions.sql | 15 ++++ .../doc_descriptions/general/__overview__.md | 70 ------------------- models/sources.yml | 7 +- profiles.yml | 58 +++++++-------- 8 files changed, 133 insertions(+), 238 deletions(-) delete mode 100644 .github/workflows/dbt_docs_update.yml delete mode 100644 .github/workflows/dbt_run_adhoc.yml create mode 100644 macros/streamline/functions.sql delete mode 100644 models/doc_descriptions/general/__overview__.md diff --git a/.github/workflows/dbt_docs_update.yml b/.github/workflows/dbt_docs_update.yml deleted file mode 100644 index 4d70569..0000000 --- a/.github/workflows/dbt_docs_update.yml +++ /dev/null @@ -1,68 +0,0 @@ -name: docs_update - -on: - push: - branches: - - "main" - -env: - DBT_PROFILES_DIR: ./ - - ACCOUNT: "${{ vars.ACCOUNT }}" - ROLE: "${{ vars.ROLE }}" - USER: "${{ vars.USER }}" - PASSWORD: "${{ secrets.PASSWORD }}" - REGION: "${{ vars.REGION }}" - DATABASE: "${{ vars.DATABASE }}" - WAREHOUSE: "${{ vars.WAREHOUSE }}" - SCHEMA: "${{ vars.SCHEMA }}" - -concurrency: - group: ${{ github.workflow }} - -jobs: - scheduled_run: - name: docs_update - runs-on: ubuntu-latest - environment: - name: workflow_prod - - steps: - - uses: actions/checkout@v3 - - - uses: actions/setup-python@v1 - with: - python-version: "3.7.x" - - - name: install dependencies - run: | - pip install dbt-snowflake==${{ vars.DBT_VERSION }} - dbt deps - - name: checkout docs branch - run: | - git checkout -b docs origin/main - - - name: generate dbt docs - run: dbt docs generate -t prod - - - name: move files to docs directory - run: | - mkdir -p ./docs - cp target/{catalog.json,manifest.json,index.html} docs/ - - name: clean up target directory - run: dbt clean - - - name: check for changes - run: git status - - - name: stage changed files - run: git add . 
-      - name: commit changed files
-        run: |
-          git config user.email "abc@xyz"
-          git config user.name "github-actions"
-          git commit -am "Auto-update docs"
-      - name: push changes to docs
-        run: |
-          git push -f --set-upstream origin docs
\ No newline at end of file
diff --git a/.github/workflows/dbt_run_adhoc.yml b/.github/workflows/dbt_run_adhoc.yml
deleted file mode 100644
index b6a0e0b..0000000
--- a/.github/workflows/dbt_run_adhoc.yml
+++ /dev/null
@@ -1,65 +0,0 @@
-name: dbt_run_adhoc
-run-name: dbt_run_adhoc
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-    inputs:
-      environment:
-        type: choice
-        description: DBT Run Environment
-        required: true
-        options:
-          - dev
-          - prod
-        default: dev
-      warehouse:
-        type: choice
-        description: Snowflake warehouse
-        required: true
-        options:
-          - DBT
-          - DBT_CLOUD
-          - DBT_EMERGENCY
-        default: DBT
-      dbt_command:
-        type: string
-        description: 'DBT Run Command'
-        required: true
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ inputs.warehouse }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_${{ inputs.environment }}
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v1
-        with:
-          python-version: "3.7.x"
-
-      - name: install dependencies
-        run: |
-          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
-          dbt deps
-      - name: Run DBT Jobs
-        run: |
-          ${{ inputs.dbt_command }}
\ No newline at end of file
diff --git a/README.md b/README.md
index 72b952b..dca43ef 100644
--- a/README.md
+++ b/README.md
@@ -43,6 +43,42 @@

### **Overview of Available Functions**

#### **UTILS Functions**

- `utils.udf_hex_to_int`: Use this UDF to transform any hex string to an integer
  ```
  ex: Curve Swaps

  SELECT
    regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
    utils.udf_hex_to_int(segmented_data [1] :: STRING) :: INTEGER AS tokens_sold
  FROM
    optimism.core.fact_event_logs
  WHERE
    topics [0] :: STRING IN (
      '0x8b3e96f2b889fa771c53c981b40daf005f63f637f1869f707052d15a3dd97140',
      '0xd013ca23e77a65003c2c659c5442c00c805371b7fc1ebd4c206c41d1536bd90b'
    )
  ```
- `utils.udf_hex_to_string`: Use this UDF to transform any hexadecimal string to a regular string, removing any non-printable or control characters from the resulting string.
  ```
  ex: Token Names

  WITH base AS (
  SELECT
    '0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000005452617265202d204368616e74616c20486167656c202d20576f6d656e2773204575726f2032303232202d2032303232205371756164202d20576f6d656e2773204e6174696f6e616c205465616d202d2032303232000000000000000000000000' AS input_token_name
  )

  SELECT
    utils.udf_hex_to_string(SUBSTR(input_token_name,(64*2+3),LEN(input_token_name))) AS output_token_name
  FROM base;

  NOTE: The expression 64 * 2 + 3 in the query navigates to the 131st character of the hexadecimal string returned by an EVM blockchain contract's function, skipping metadata and adjusting for Snowflake's 1-based indexing.
Keep in mind that the exact start of relevant data may vary between different contracts and functions. + + ``` + ## Resources * Learn more about dbt [in the docs](https://docs.getdbt.com/docs/introduction) diff --git a/macros/streamline/configs.yaml.sql b/macros/streamline/configs.yaml.sql index 7e09ec1..b4925eb 100644 --- a/macros/streamline/configs.yaml.sql +++ b/macros/streamline/configs.yaml.sql @@ -44,5 +44,57 @@ try_hex_decode_string(hex), '[\x00-\x1F\x7F-\x9F\xAD]', '', 1)) +- name: {{ schema }}.udf_json_rpc_call + signature: + - [method, STRING] + - [params, ARRAY] + return_type: OBJECT + options: | + NULL + LANGUAGE SQL + RETURNS NULL ON NULL INPUT + IMMUTABLE + sql: | + {{ sql_udf_json_rpc_call() }} +- name: {{ schema }}.udf_json_rpc_call + signature: + - [method, STRING] + - [params, OBJECT] + return_type: OBJECT + options: | + NULL + LANGUAGE SQL + RETURNS NULL ON NULL INPUT + IMMUTABLE + sql: | + {{ sql_udf_json_rpc_call() }} +- name: {{ schema }}.udf_json_rpc_call + signature: + - [method, STRING] + - [params, OBJECT] + - [id, STRING] + return_type: OBJECT + options: | + NULL + LANGUAGE SQL + RETURNS NULL ON NULL INPUT + IMMUTABLE + sql: | + {{ sql_udf_json_rpc_call(False) }} +- name: {{ schema }}.udf_json_rpc_call + signature: + - [method, STRING] + - [params, ARRAY] + - [id, STRING] + return_type: OBJECT + options: | + NULL + LANGUAGE SQL + RETURNS NULL ON NULL INPUT + IMMUTABLE + sql: | + {{ sql_udf_json_rpc_call(False) }} + + {% endmacro %} diff --git a/macros/streamline/functions.sql b/macros/streamline/functions.sql new file mode 100644 index 0000000..6944927 --- /dev/null +++ b/macros/streamline/functions.sql @@ -0,0 +1,15 @@ +{% macro sql_udf_json_rpc_call(use_default_id=True ) %} + SELECT IFF(method IS NULL or params IS NULL, + NULL, + { + 'jsonrpc': '2.0', + 'method': method, + 'params': params + {% if use_default_id %} + , 'id': hash(method, params)::string + {% else %} + , 'id': id + {% endif %} + } + ) +{% endmacro %} \ No newline at end of file diff --git a/models/doc_descriptions/general/__overview__.md b/models/doc_descriptions/general/__overview__.md deleted file mode 100644 index e0ddda4..0000000 --- a/models/doc_descriptions/general/__overview__.md +++ /dev/null @@ -1,70 +0,0 @@ -{% docs __overview__ %} - -# Welcome to the Flipside Crypto Utility Functions Documentation! - -## **What does this documentation cover?** -The documentation included here details the design of the utility functions available via [Flipside Crypto](https://flipsidecrypto.xyz/). For more information on how these functions are built, please see [the github repository.](https://github.com/FlipsideCrypto/fsc-utils) - -### **Overview of Available Functions** - -#### **UTILS Functions** - -- `utils.udf_hex_to_int`: Use this UDF to transform any hex string to integer - ``` - ex: Curve Swaps - - SELECT - regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data, - utils.hex_to_int(segmented_data [1] :: STRING) :: INTEGER AS tokens_sold - FROM - optimism.core.fact_event_logs - WHERE - topics [0] :: STRING IN ( - '0x8b3e96f2b889fa771c53c981b40daf005f63f637f1869f707052d15a3dd97140', - '0xd013ca23e77a65003c2c659c5442c00c805371b7fc1ebd4c206c41d1536bd90b' - ) - ``` -- `utils.udf_hex_to_string`: Use this UDF to transform any hexadecimal string to a regular string, removing any non-printable or control characters from the resulting string. 
- ``` - ex: Token Names - - WITH base AS ( - SELECT - '0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000005452617265202d204368616e74616c20486167656c202d20576f6d656e2773204575726f2032303232202d2032303232205371756164202d20576f6d656e2773204e6174696f6e616c205465616d202d2032303232000000000000000000000000' AS input_token_name - ) - - SELECT - utils.udf_hex_to_string(SUBSTR(input_token_name,(64*2+3),LEN(input_token_name))) AS output_token_name - FROM base; - - NOTE: The expression 64 * 2 + 3 in the query navigates to the 131st character of the hexadecimal string returned by an EVM blockchain contract's function, skipping metadata and adjusting for Snowflake's 1-based indexing. Keep in mind that the exact start of relevant data may vary between different contracts and functions. - - ``` - - -## **Using dbt docs** -### Navigation - -You can use the ```Project``` and ```Database``` navigation tabs on the left side of the window to explore the models in the project. - -### Database Tab - -This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the database. - -### Graph Exploration - -You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models. - -On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the Expand button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build, or are built from, the model you're exploring. - -Once expanded, you'll be able to use the ```--models``` and ```--exclude``` model selection syntax to filter the models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax). - -Note that you can also right-click on models to interactively filter and explore the graph. 
- -### **More information** -- [Flipside](https://flipsidecrypto.xyz/) -- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials) -- [Github](https://github.com/FlipsideCrypto/fsc-utils) -- [What is dbt?](https://docs.getdbt.com/docs/introduction) - -{% enddocs %} \ No newline at end of file diff --git a/models/sources.yml b/models/sources.yml index 18c2432..c008029 100644 --- a/models/sources.yml +++ b/models/sources.yml @@ -1,8 +1,3 @@ version: 2 -sources: - - name: crosschain - database: crosschain - schema: core - tables: - - name: dim_date_hours \ No newline at end of file +sources: \ No newline at end of file diff --git a/profiles.yml b/profiles.yml index ee008ef..451da35 100644 --- a/profiles.yml +++ b/profiles.yml @@ -1,29 +1,29 @@ -fsc_utils: - target: prod - outputs: - dev: - type: snowflake - account: "{{ env_var('ACCOUNT') }}" - role: "{{ env_var('ROLE') }}" - user: "{{ env_var('USER') }}" - password: "{{ env_var('PASSWORD') }}" - region: "{{ env_var('REGION') }}" - database: "{{ env_var('DATABASE') }}" - warehouse: "{{ env_var('WAREHOUSE') }}" - schema: SILVER - threads: 4 - client_session_keep_alive: False - query_tag: fsc_utils_curator - prod: - type: snowflake - account: "{{ env_var('ACCOUNT') }}" - role: "{{ env_var('ROLE') }}" - user: "{{ env_var('USER') }}" - password: "{{ env_var('PASSWORD') }}" - region: "{{ env_var('REGION') }}" - database: "{{ env_var('DATABASE') }}" - warehouse: "{{ env_var('WAREHOUSE') }}" - schema: SILVER - threads: 4 - client_session_keep_alive: False - query_tag: fsc_utils_curator \ No newline at end of file +# fsc_utils: +# target: prod +# outputs: +# dev: +# type: snowflake +# account: "{{ env_var('ACCOUNT') }}" +# role: "{{ env_var('ROLE') }}" +# user: "{{ env_var('USER') }}" +# password: "{{ env_var('PASSWORD') }}" +# region: "{{ env_var('REGION') }}" +# database: "{{ env_var('DATABASE') }}" +# warehouse: "{{ env_var('WAREHOUSE') }}" +# schema: SILVER +# threads: 4 +# client_session_keep_alive: False +# query_tag: fsc_utils_curator +# prod: +# type: snowflake +# account: "{{ env_var('ACCOUNT') }}" +# role: "{{ env_var('ROLE') }}" +# user: "{{ env_var('USER') }}" +# password: "{{ env_var('PASSWORD') }}" +# region: "{{ env_var('REGION') }}" +# database: "{{ env_var('DATABASE') }}" +# warehouse: "{{ env_var('WAREHOUSE') }}" +# schema: SILVER +# threads: 4 +# client_session_keep_alive: False +# query_tag: fsc_utils_curator \ No newline at end of file From 3f74327db9b58a954fb42a53a6c26be8b76058f4 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 19:40:47 -0600 Subject: [PATCH 21/23] sources --- models/sources.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/models/sources.yml b/models/sources.yml index c008029..e69de29 100644 --- a/models/sources.yml +++ b/models/sources.yml @@ -1,3 +0,0 @@ -version: 2 - -sources: \ No newline at end of file From 3274ce5fc07c5e150726b9d6ac11e842da9d1b6b Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 19:43:06 -0600 Subject: [PATCH 22/23] json rpc reference --- macros/streamline/configs.yaml.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/macros/streamline/configs.yaml.sql b/macros/streamline/configs.yaml.sql index b4925eb..01c9eb5 100644 --- a/macros/streamline/configs.yaml.sql +++ b/macros/streamline/configs.yaml.sql @@ -55,7 +55,7 @@ RETURNS NULL ON NULL INPUT IMMUTABLE sql: | - {{ sql_udf_json_rpc_call() }} + {{ fsc_utils.sql_udf_json_rpc_call() }} - name: {{ schema }}.udf_json_rpc_call signature: - [method, STRING] @@ -67,7 +67,7 @@ 
RETURNS NULL ON NULL INPUT IMMUTABLE sql: | - {{ sql_udf_json_rpc_call() }} + {{ fsc_utils.sql_udf_json_rpc_call() }} - name: {{ schema }}.udf_json_rpc_call signature: - [method, STRING] @@ -80,7 +80,7 @@ RETURNS NULL ON NULL INPUT IMMUTABLE sql: | - {{ sql_udf_json_rpc_call(False) }} + {{ fsc_utils.sql_udf_json_rpc_call(False) }} - name: {{ schema }}.udf_json_rpc_call signature: - [method, STRING] @@ -93,7 +93,7 @@ RETURNS NULL ON NULL INPUT IMMUTABLE sql: | - {{ sql_udf_json_rpc_call(False) }} + {{ fsc_utils.sql_udf_json_rpc_call(False) }} {% endmacro %} From 18c7256556d956451e8560c7beb5dd60a31d3ae5 Mon Sep 17 00:00:00 2001 From: drethereum Date: Mon, 22 May 2023 19:45:22 -0600 Subject: [PATCH 23/23] remove other unnecessary files --- dbt_project.yml | 2 +- models/sources.yml | 0 profiles.yml | 29 ----------------------------- 3 files changed, 1 insertion(+), 30 deletions(-) delete mode 100644 models/sources.yml delete mode 100644 profiles.yml diff --git a/dbt_project.yml b/dbt_project.yml index 76dbb16..5715269 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -6,7 +6,7 @@ version: "1.0.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. -profile: "fsc_utils" +# profile: "fsc_utils" # These configurations specify where dbt should look for different types of files. # The `source-paths` config, for example, states that models in this project can be diff --git a/models/sources.yml b/models/sources.yml deleted file mode 100644 index e69de29..0000000 diff --git a/profiles.yml b/profiles.yml deleted file mode 100644 index 451da35..0000000 --- a/profiles.yml +++ /dev/null @@ -1,29 +0,0 @@ -# fsc_utils: -# target: prod -# outputs: -# dev: -# type: snowflake -# account: "{{ env_var('ACCOUNT') }}" -# role: "{{ env_var('ROLE') }}" -# user: "{{ env_var('USER') }}" -# password: "{{ env_var('PASSWORD') }}" -# region: "{{ env_var('REGION') }}" -# database: "{{ env_var('DATABASE') }}" -# warehouse: "{{ env_var('WAREHOUSE') }}" -# schema: SILVER -# threads: 4 -# client_session_keep_alive: False -# query_tag: fsc_utils_curator -# prod: -# type: snowflake -# account: "{{ env_var('ACCOUNT') }}" -# role: "{{ env_var('ROLE') }}" -# user: "{{ env_var('USER') }}" -# password: "{{ env_var('PASSWORD') }}" -# region: "{{ env_var('REGION') }}" -# database: "{{ env_var('DATABASE') }}" -# warehouse: "{{ env_var('WAREHOUSE') }}" -# schema: SILVER -# threads: 4 -# client_session_keep_alive: False -# query_tag: fsc_utils_curator \ No newline at end of file
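---

Usage note for the `udf_json_rpc_call` overloads introduced in [PATCH 20/23]. This is an editor's sketch, not part of the patch series: it assumes the UDFs were deployed to a schema named `utils` (the real schema comes from the `{{ schema }}` value passed into `configs.yaml.sql`), and the block number and request id below are placeholder values.

```sql
-- Two-argument overload: per sql_udf_json_rpc_call(), the id is derived
-- deterministically as hash(method, params)::string.
SELECT
    utils.udf_json_rpc_call(
        'eth_getBlockByNumber',
        ARRAY_CONSTRUCT('0x10d4f', FALSE)
    ) AS request_body;
-- Result shape (keys serialize alphabetically; id value will differ):
-- { "id": "<hash>", "jsonrpc": "2.0",
--   "method": "eth_getBlockByNumber", "params": ["0x10d4f", false] }

-- Three-argument overload: supply an explicit id instead of the hash.
SELECT
    utils.udf_json_rpc_call(
        'eth_getBlockByNumber',
        ARRAY_CONSTRUCT('0x10d4f', FALSE),
        'my-request-1'
    ) AS request_body;
```

Because the functions are declared `RETURNS NULL ON NULL INPUT` (reinforced by the `IFF` null guard inside `sql_udf_json_rpc_call`), a NULL method or params yields NULL rather than a malformed request object.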