folder structure and tags

drethereum 2023-07-31 14:18:56 -06:00
parent 3e0d7fe13e
commit 8e6c5e27a2
117 changed files with 210 additions and 80 deletions

View File

@@ -41,7 +41,7 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m models/silver/abis
dbt run -m tag:abis
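
The switch from path selectors (models/silver/abis) to tag selectors means the workflow now picks up any model carrying the tag, wherever it sits in the folder tree. A minimal sketch of the two ways a tag can be attached in dbt — inline in a model's config block, as the model changes later in this commit do, or at the folder level in dbt_project.yml; the project name base_models is a placeholder:

    -- in the model file
    {{ config(
        materialized = 'incremental',
        tags = ['abis']
    ) }}

    # or in dbt_project.yml
    models:
      base_models:
        silver:
          abis:
            +tags: abis

The two forms are additive: dbt merges folder-level and model-level tags, so dbt run -m tag:abis selects the union of both.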

View File

@@ -41,7 +41,7 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m models/silver/_observability
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m tag:observability
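
Values passed with --vars surface inside models and tests through the var() context function. A hedged sketch of the pattern the observability models presumably use — the variable name comes from the command above, but the branch bodies are illustrative only:

    {% if var('OBSERV_FULL_TEST', false) %}
        -- compare against the full history
    {% else %}
        -- compare against a recent window only
    {% endif %}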

View File

@@ -1,11 +1,11 @@
name: dbt_run_api_feeder_table
run-name: dbt_run_api_feeder_table
name: dbt_run_scheduled_non_realtime
run-name: dbt_run_scheduled_non_realtime
on:
workflow_dispatch:
schedule:
# Runs "at 7:00 UTC" (see https://crontab.guru)
- cron: '0 7 * * *'
# Runs "At minute 40, every hour" (see https://crontab.guru)
- cron: '40 * * * *'
env:
DBT_PROFILES_DIR: ./
@@ -41,4 +41,4 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m models/silver/API_udf
dbt run -m tag:non_realtime

View File

@@ -41,4 +41,5 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m models/silver/core/silver__decoded_logs.sql
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+tag:streamline_decoded_logs_realtime
dbt run -m tag:decoded_logs
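
The 1+ prefix is dbt graph-selector syntax: it selects the tagged nodes plus one generation of parents, presumably so the models' immediate upstream dependencies are refreshed in the same run. For contrast:

    dbt run -m tag:streamline_decoded_logs_realtime     # the tagged models only
    dbt run -m 1+tag:streamline_decoded_logs_realtime   # plus their direct parents
    dbt run -m +tag:streamline_decoded_logs_realtime    # plus all ancestors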

View File

@@ -41,4 +41,4 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m models/silver/streamline/decoder/streamline__complete_decode_logs.sql models/silver/streamline/decoder/history
dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m 1+tag:streamline_decoded_logs_history

View File

@@ -0,0 +1,69 @@
name: dbt_run_streamline_history_adhoc
run-name: dbt_run_streamline_history_adhoc
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
type: choice
description: DBT Run Environment
required: true
options:
- dev
- prod
- prod_backfill
default: dev
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: choice
description: 'DBT Run Command'
required: true
options:
- dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m 1+tag:streamline_decoded_logs_history
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
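
A usage sketch for the new ad-hoc workflow, assuming it lives at .github/workflows/dbt_run_streamline_history_adhoc.yml (the path is not shown in this diff view): GitHub CLI can supply the three dispatch inputs, and the concurrency group above keeps a second dispatch waiting until the first run finishes.

    gh workflow run dbt_run_streamline_history_adhoc.yml \
      -f environment=dev \
      -f warehouse=DBT \
      -f dbt_command="dbt run --threads 8 --vars '{\"STREAMLINE_INVOKE_STREAMS\":True,\"WAIT\":120}' -m 1+tag:streamline_decoded_logs_history"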

View File

@@ -41,4 +41,4 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/silver/streamline/core/realtime
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+tag:streamline_core_realtime

View File

@@ -41,7 +41,7 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt test --exclude tag:full_test tag:recent_test models/silver/goerli
dbt test --exclude tag:full_test tag:recent_test tag:base_goerli
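
Space-separated selectors under --exclude form a union, so this skips any test attached to a model carrying any of the three tags; comma syntax is the intersection form, shown here only for contrast:

    dbt test --exclude tag:full_test tag:recent_test tag:base_goerli   # union: any one of the tags
    dbt test --exclude tag:full_test,tag:recent_test                   # intersection: both tags at once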

View File

@@ -1,11 +1,11 @@
name: dbt_run_scheduled
run-name: dbt_run_scheduled
name: dbt_run_api_feeder_table
run-name: dbt_run_api_feeder_table
on:
workflow_dispatch:
schedule:
# Runs "At minute 40, every hour" (see https://crontab.guru)
- cron: '40 * * * *'
# Runs "at 7:00 UTC" (see https://crontab.guru)
- cron: '0 7 * * *'
env:
DBT_PROFILES_DIR: ./
@@ -41,5 +41,7 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run --exclude models/silver/abis models/silver/goerli models/silver/streamline models/bronze/eth_goerli models/silver/_observability models/silver/API_udf models/silver/core/silver__decoded_logs.sql
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m models/silver/streamline/decoder/streamline__complete_decode_logs.sql models/silver/streamline/decoder/streamline__decode_logs_realtime.sql
dbt run -m models/silver/API_udf
# moved to non_realtime job to match ethereum's structure
# relevant_abi_contracts now running in abi_refresh job
# relevant contracts now in non_realtime incremental job (will need to be here when we add contract_reads anyway)
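
The trailing comments record where each piece of the old catch-all job went. dbt ls can confirm the new tag-scoped jobs jointly cover what the exclude list used to run — a verification sketch, not part of the commit:

    dbt ls -m tag:non_realtime tag:abis tag:decoded_logs                                          # union of the new tag jobs
    dbt ls --exclude tag:non_realtime tag:abis tag:decoded_logs tag:base_goerli tag:observability # anything left untagged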

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false
full_refresh = false,
tags = ['non_realtime']
) }}
WITH api_keys AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false
full_refresh = false,
tags = ['non_realtime']
) }}
WITH base AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'batch_id',
full_refresh = False
full_refresh = false,
tags = ['base_goerli']
) }}
WITH request_details AS (

View File

@@ -2,7 +2,8 @@
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["block_number"]
merge_update_columns = ["block_number"],
tags = ['base_goerli']
) }}
WITH meta AS (
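
merge_update_columns narrows the UPDATE branch of the MERGE statement dbt's Snowflake incremental strategy generates: with only the key column listed, a matched row is effectively left untouched and only new block numbers land. Roughly the statement dbt emits, with the full column list abbreviated:

    MERGE INTO blocks AS t
    USING batch AS s
        ON t.block_number = s.block_number
    WHEN MATCHED THEN UPDATE
        SET block_number = s.block_number   -- only the listed column is written
    WHEN NOT MATCHED THEN INSERT (block_number, ...)
        VALUES (s.block_number, ...)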

View File

@@ -1,7 +1,8 @@
{{ config (
materialized = "incremental",
unique_key = "tx_hash",
cluster_by = "ROUND(block_number, -3)"
cluster_by = "ROUND(block_number, -3)",
tags = ['base_goerli']
) }}
WITH meta AS (

View File

@@ -2,7 +2,8 @@
materialized = "incremental",
unique_key = "ID",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["ID"]
merge_update_columns = ["ID"],
tags = ['base_goerli']
) }}
WITH meta AS (

View File

@@ -2,7 +2,8 @@
materialized = "incremental",
unique_key = "tx_hash",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["tx_hash"]
merge_update_columns = ["tx_hash"],
tags = ['base_goerli']
) }}
WITH meta AS (

View File

@@ -2,7 +2,8 @@
materialized = "incremental",
unique_key = "block_number",
cluster_by = "block_timestamp::date",
merge_update_columns = ["block_number"]
merge_update_columns = ["block_number"],
tags = ['base_goerli']
) }}
SELECT

View File

@@ -1,7 +1,8 @@
{{ config (
materialized = "incremental",
unique_key = "_log_id",
cluster_by = "ROUND(block_number, -3)"
cluster_by = "ROUND(block_number, -3)",
tags = ['base_goerli']
) }}
WITH logs_response AS (

View File

@@ -2,7 +2,8 @@
materialized = "incremental",
unique_key = "_call_id",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["_call_id"]
merge_update_columns = ["_call_id"],
tags = ['base_goerli']
) }}
WITH new_txs AS (

View File

@@ -1,7 +1,8 @@
{{ config (
materialized = "incremental",
unique_key = "tx_hash",
cluster_by = "BLOCK_TIMESTAMP::DATE"
cluster_by = "BLOCK_TIMESTAMP::DATE",
tags = ['base_goerli']
) }}
WITH flat_base AS (

View File

@@ -1,6 +1,7 @@
{{ config (
materialized = "table",
unique_key = "batch_id"
unique_key = "batch_id",
tags = ['base_goerli']
) }}
WITH inputs AS (

View File

@@ -1,6 +1,7 @@
{{ config (
materialized = "table",
unique_key = "batch_id"
unique_key = "batch_id",
tags = ['base_goerli']
) }}
WITH ranges AS (

View File

@@ -1,5 +1,6 @@
{{ config (
materialized = "view"
materialized = "view",
tags = ['base_goerli']
) }}
WITH base AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = '_log_id',
cluster_by = ['round(block_number,-3)']
cluster_by = ['round(block_number,-3)'],
tags = ['base_goerli']
) }}
WITH base AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
full_refresh = false
full_refresh = false,
tags = ['observability']
) }}
WITH summary_stats AS (
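
full_refresh = false makes an incremental model ignore the --full-refresh flag, so the accumulated observability history cannot be dropped and rebuilt by accident:

    dbt run --full-refresh -m tag:observability   # these models still run incrementally; the config blocks the rebuild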

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
full_refresh = false
full_refresh = false,
tags = ['observability']
) }}
WITH summary_stats AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
full_refresh = false
full_refresh = false,
tags = ['observability']
) }}
WITH summary_stats AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
full_refresh = false
full_refresh = false,
tags = ['observability']
) }}
WITH summary_stats AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
full_refresh = false
full_refresh = false,
tags = ['observability']
) }}
WITH summary_stats AS (

View File

@@ -1,7 +1,8 @@
{{ config (
materialized = "incremental",
unique_key = "contract_address",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(contract_address)"
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(contract_address)",
tags = ['abis']
) }}
WITH override_abis AS (

View File

@@ -1,6 +1,7 @@
{{ config (
materialized = "incremental",
unique_key = "abi_id"
unique_key = "abi_id",
tags = ['abis']
) }}
WITH bytecodes AS (

View File

@@ -1,5 +1,6 @@
{{ config (
materialized = 'table'
materialized = 'table',
tags = ['abis']
) }}
WITH abi_base AS (

View File

@@ -1,5 +1,6 @@
{{ config(
materialized = 'view'
materialized = 'view',
tags = ['abis']
) }}
SELECT

View File

@@ -1,5 +1,6 @@
{{ config (
materialized = "table"
materialized = "table",
tags = ['abis']
) }}
WITH base AS (

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'table',
unique_key = "contract_address"
unique_key = "contract_address",
tags = ['abis']
) }}
WITH base AS (

View File

@@ -1,7 +1,8 @@
{{ config (
materialized = "incremental",
unique_key = "id",
merge_update_columns = ["id"]
merge_update_columns = ["id"],
tags = ['abis']
) }}
WITH base AS (

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address"
unique_key = "contract_address",
tags = ['abis']
) }}
WITH base AS (

View File

@@ -3,7 +3,7 @@
materialized = 'incremental',
unique_key = "block_number",
cluster_by = "block_timestamp::date",
tags = ['core'],
tags = ['core','non_realtime'],
full_refresh = false
) }}
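
Listing two tags keeps these models in the existing core job while also enrolling them in the new non_realtime run; a model matches a tag selector if the tag appears anywhere in its list:

    dbt ls -m tag:core            # includes this model
    dbt ls -m tag:non_realtime    # also includes this model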

View File

@@ -4,7 +4,8 @@
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = "round(block_number,-3)",
full_refresh = false
full_refresh = false,
tags = ['non_realtime']
) }}
WITH base AS (

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'incremental',
unique_key = 'contract_address'
unique_key = 'contract_address',
tags = ['non_realtime']
) }}
WITH base_metadata AS (

View File

@@ -1,6 +1,7 @@
{{ config (
materialized = "incremental",
unique_key = "created_contract_address"
unique_key = "created_contract_address",
tags = ['non_realtime']
) }}
SELECT

View File

@@ -4,7 +4,8 @@
unique_key = ['block_number', 'event_index'],
cluster_by = "block_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
full_refresh = false
full_refresh = false,
tags = ['decoded_logs']
) }}
WITH base_data AS (

View File

@@ -4,7 +4,7 @@
unique_key = "block_number",
cluster_by = "block_timestamp::date, _inserted_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
tags = ['core'],
tags = ['core','non_realtime'],
full_refresh = false
) }}

View File

@@ -5,7 +5,7 @@
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(tx_hash)",
tags = ['core'],
tags = ['core','non_realtime'],
full_refresh = false
) }}

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'table',
unique_key = "contract_address"
unique_key = "contract_address",
tags = ['non_realtime']
) }}
SELECT

View File

@@ -5,7 +5,7 @@
unique_key = "block_number",
cluster_by = "block_timestamp::date, _inserted_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
tags = ['core'],
tags = ['core','non_realtime'],
full_refresh = false
) }}

View File

@@ -5,7 +5,7 @@
unique_key = "block_number",
cluster_by = "block_timestamp::date, _inserted_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
tags = ['core'],
tags = ['core','non_realtime'],
full_refresh = false
) }}

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = '_log_id',
cluster_by = ['block_timestamp::DATE', '_inserted_timestamp::DATE']
cluster_by = ['block_timestamp::DATE', '_inserted_timestamp::DATE'],
tags = ['non_realtime']
) }}
WITH logs AS (

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'incremental',
unique_key = "block_number"
unique_key = "block_number",
tags = ['non_realtime']
) }}
SELECT

View File

@@ -3,7 +3,8 @@
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'confirm_blocks', 'sql_limit', {{var('sql_limit','100000')}}, 'producer_batch_size', {{var('producer_batch_size','100000')}}, 'worker_batch_size', {{var('worker_batch_size','100000')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}))",
target = "{{this.schema}}.{{this.identifier}}"
)
),
tags = ['streamline_core_realtime']
) }}
with tbl AS (
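
The var('sql_limit','100000') calls inside these post_hooks fall back to their second argument, so batch sizing is tunable per invocation without editing the model — an invocation sketch; the workflows in this commit pass only STREAMLINE_INVOKE_STREAMS and WAIT, leaving the sizing vars at their defaults:

    dbt run --vars '{"STREAMLINE_INVOKE_STREAMS": True, "sql_limit": 50000}' -m 1+tag:streamline_core_realtime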

View File

@@ -3,7 +3,8 @@
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_get_traces(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','100000')}}, 'producer_batch_size', {{var('producer_batch_size','100000')}}, 'worker_batch_size', {{var('worker_batch_size','50000')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))",
target = "{{this.schema}}.{{this.identifier}}"
)
),
tags = ['streamline_core_realtime']
) }}
WITH blocks AS (

View File

@@ -3,7 +3,8 @@
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','100000')}}, 'producer_batch_size', {{var('producer_batch_size','100000')}}, 'worker_batch_size', {{var('worker_batch_size','100000')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}))",
target = "{{this.schema}}.{{this.identifier}}"
)
),
tags = ['streamline_core_realtime']
) }}
WITH blocks AS (

View File

@@ -1,6 +1,7 @@
{{ config (
materialized = "view",
post_hook = [if_data_call_function( func = "{{model.schema}}.udf_bulk_decode_logs(object_construct('sql_source', '{{model.alias}}','producer_batch_size', 20000000,'producer_limit_size', {{var('row_limit',7500000)}}))", target = "{{model.schema}}.{{model.alias}}" ) ,if_data_call_wait()]
post_hook = [if_data_call_function( func = "{{model.schema}}.udf_bulk_decode_logs(object_construct('sql_source', '{{model.alias}}','producer_batch_size', 20000000,'producer_limit_size', {{var('row_limit',7500000)}}))", target = "{{model.schema}}.{{model.alias}}" ) ,if_data_call_wait()],
tags = ['streamline_decoded_logs_history']
) }}
{% set start = this.identifier.split("_") [-2] %}
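
The Jinja line below each of these configs derives a block range from the model's own name, letting one template serve every history slice. A sketch of the mechanics, assuming a hypothetical identifier like decode_logs_history_10000000_11000000 (the real naming scheme is not shown in this diff):

    {% set parts = this.identifier.split("_") %}   {# ['decode', 'logs', 'history', '10000000', '11000000'] #}
    {% set start = parts[-2] %}                    {# '10000000' #}
    {% set stop  = parts[-1] %}                    {# '11000000' #}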

View File

@@ -1,6 +1,7 @@
{{ config (
materialized = "view",
post_hook = [if_data_call_function( func = "{{model.schema}}.udf_bulk_decode_logs(object_construct('sql_source', '{{model.alias}}','producer_batch_size', 20000000,'producer_limit_size', {{var('row_limit',7500000)}}))", target = "{{model.schema}}.{{model.alias}}" ) ,if_data_call_wait()]
post_hook = [if_data_call_function( func = "{{model.schema}}.udf_bulk_decode_logs(object_construct('sql_source', '{{model.alias}}','producer_batch_size', 20000000,'producer_limit_size', {{var('row_limit',7500000)}}))", target = "{{model.schema}}.{{model.alias}}" ) ,if_data_call_wait()],
tags = ['streamline_decoded_logs_history']
) }}
{% set start = this.identifier.split("_") [-2] %}

View File

@@ -1,6 +1,7 @@
{{ config (
materialized = "view",
post_hook = [if_data_call_function( func = "{{model.schema}}.udf_bulk_decode_logs(object_construct('sql_source', '{{model.alias}}','producer_batch_size', 20000000,'producer_limit_size', {{var('row_limit',7500000)}}))", target = "{{model.schema}}.{{model.alias}}" ) ,if_data_call_wait()]
post_hook = [if_data_call_function( func = "{{model.schema}}.udf_bulk_decode_logs(object_construct('sql_source', '{{model.alias}}','producer_batch_size', 20000000,'producer_limit_size', {{var('row_limit',7500000)}}))", target = "{{model.schema}}.{{model.alias}}" ) ,if_data_call_wait()],
tags = ['streamline_decoded_logs_history']
) }}
{% set start = this.identifier.split("_") [-2] %}

Some files were not shown because too many files have changed in this diff.