From bf01e3486e49deac5dd254794f643a93e715b76e Mon Sep 17 00:00:00 2001 From: drethereum Date: Wed, 26 Feb 2025 10:26:43 -0700 Subject: [PATCH] initial set up --- .../workflows/dbt_deploy_new_workflows.yml | 45 ++ .github/workflows/dbt_docs_update.yml | 76 ++++ .github/workflows/dbt_integration_test.yml | 17 + .github/workflows/dbt_run_adhoc.yml | 66 +++ .github/workflows/dbt_run_deployment.yml | 67 +++ .github/workflows/dbt_run_dev_refresh.yml | 68 +++ .github/workflows/dbt_run_scheduled_main.yml | 45 ++ .../dbt_run_streamline_chainhead.yml | 49 ++ .../workflows/dbt_run_streamline_history.yml | 45 ++ .github/workflows/dbt_test_daily.yml | 49 ++ .github/workflows/dbt_test_intraday.yml | 49 ++ .github/workflows/dbt_test_monthly.yml | 49 ++ .gitignore | 20 + README.md | 145 ++++++ analysis/.gitkeep | 0 data/.gitkeep | 0 data/github_actions__workflows.csv | 4 + dbt_project.yml | 122 +++++ macros/custom_naming_macros.sql | 11 + macros/dbt/get_merge_sql.sql | 44 ++ macros/dbt/get_tmp_relation_type.sql | 8 + makefile | 44 ++ models/__overview__.md | 72 +++ .../github_actions__current_task_status.sql | 6 + .../github_actions__task_history.sql | 6 + .../github_actions__task_performance.sql | 6 + .../github_actions__task_schedule.sql | 6 + .../github_actions/github_actions__tasks.sql | 6 + models/sources.yml | 45 ++ .../streamline/bronze_testnet__blocks.sql | 41 ++ .../streamline/bronze_testnet__blocks_fr.sql | 42 ++ .../bronze_testnet__confirm_blocks.sql | 41 ++ .../bronze_testnet__confirm_blocks_fr.sql | 42 ++ .../streamline/bronze_testnet__receipts.sql | 41 ++ .../bronze_testnet__receipts_fr.sql | 42 ++ .../streamline/bronze_testnet__traces.sql | 41 ++ .../streamline/bronze_testnet__traces_fr.sql | 42 ++ .../bronze_testnet__transactions.sql | 41 ++ .../bronze_testnet__transactions_fr.sql | 42 ++ .../bronze_api_testnet__token_reads.sql | 130 ++++++ .../bronze_api_testnet__token_reads.yml | 15 + .../core/gold/testnet__dim_contracts.sql | 46 ++ 
.../core/gold/testnet__dim_contracts.yml | 28 ++ .../core/gold/testnet__fact_blocks.sql | 73 +++ .../core/gold/testnet__fact_blocks.yml | 66 +++ .../core/gold/testnet__fact_event_logs.sql | 217 +++++++++ .../core/gold/testnet__fact_event_logs.yml | 46 ++ .../core/gold/testnet__fact_traces.sql | 420 ++++++++++++++++++ .../core/gold/testnet__fact_traces.yml | 62 +++ .../core/gold/testnet__fact_transactions.sql | 346 +++++++++++++++ .../core/gold/testnet__fact_transactions.yml | 72 +++ .../test_gold_testnet__fact_blocks_full.sql | 9 + .../test_gold_testnet__fact_blocks_full.yml | 138 ++++++ .../test_gold_testnet__fact_blocks_recent.sql | 16 + .../test_gold_testnet__fact_blocks_recent.yml | 147 ++++++ ...est_gold_testnet__fact_event_logs_full.sql | 9 + ...est_gold_testnet__fact_event_logs_full.yml | 102 +++++ ...t_gold_testnet__fact_event_logs_recent.sql | 16 + ...t_gold_testnet__fact_event_logs_recent.yml | 101 +++++ .../test_gold_testnet__fact_traces_full.sql | 9 + .../test_gold_testnet__fact_traces_full.yml | 122 +++++ .../test_gold_testnet__fact_traces_recent.sql | 16 + .../test_gold_testnet__fact_traces_recent.yml | 122 +++++ ...t_gold_testnet__fact_transactions_full.sql | 9 + ...t_gold_testnet__fact_transactions_full.yml | 125 ++++++ ...gold_testnet__fact_transactions_recent.sql | 16 + ...gold_testnet__fact_transactions_recent.yml | 124 ++++++ .../core/silver/silver_testnet__blocks.sql | 41 ++ .../core/silver/silver_testnet__contracts.sql | 112 +++++ .../silver_testnet__created_contracts.sql | 44 ++ .../core/silver/silver_testnet__receipts.sql | 44 ++ .../silver_testnet__relevant_contracts.sql | 134 ++++++ .../core/silver/silver_testnet__traces.sql | 136 ++++++ .../silver/silver_testnet__transactions.sql | 44 ++ .../test_silver_testnet__blocks_full.sql | 9 + .../test_silver_testnet__blocks_full.yml | 51 +++ .../test_silver_testnet__blocks_recent.sql | 16 + .../test_silver_testnet__blocks_recent.yml | 51 +++ .../test_silver_testnet__receipts_full.sql | 9 + 
.../test_silver_testnet__receipts_full.yml | 49 ++ .../test_silver_testnet__receipts_recent.sql | 16 + .../test_silver_testnet__receipts_recent.yml | 49 ++ .../test_silver_testnet__traces_full.sql | 9 + .../test_silver_testnet__traces_full.yml | 52 +++ .../test_silver_testnet__traces_recent.sql | 16 + .../test_silver_testnet__traces_recent.yml | 52 +++ ...test_silver_testnet__transactions_full.sql | 9 + ...test_silver_testnet__transactions_full.yml | 53 +++ ...st_silver_testnet__transactions_recent.sql | 16 + ...st_silver_testnet__transactions_recent.yml | 53 +++ .../streamline__testnet_blocks_complete.sql | 33 ++ .../streamline__testnet_receipts_complete.sql | 33 ++ .../streamline__testnet_traces_complete.sql | 33 ++ ...eamline__testnet_transactions_complete.sql | 33 ++ ...e__testnet_blocks_transactions_history.sql | 56 +++ .../streamline__testnet_receipts_history.sql | 55 +++ .../streamline__testnet_traces_history.sql | 55 +++ ...__testnet_blocks_transactions_realtime.sql | 54 +++ .../streamline__testnet_receipts_realtime.sql | 53 +++ .../streamline__testnet_traces_realtime.sql | 53 +++ .../streamline__get_testnet_chainhead.sql | 28 ++ .../streamline__get_testnet_chainhead.yml | 9 + .../streamline/streamline__testnet_blocks.sql | 28 ++ .../testnet/utils/_testnet_block_lookback.sql | 11 + .../testnet/utils/utils__number_sequence.sql | 19 + packages.yml | 3 + profiles.yml | 31 ++ requirements.txt | 2 + selectors.yml | 7 + snapshots/.gitkeep | 0 110 files changed, 5848 insertions(+) create mode 100644 .github/workflows/dbt_deploy_new_workflows.yml create mode 100644 .github/workflows/dbt_docs_update.yml create mode 100644 .github/workflows/dbt_integration_test.yml create mode 100644 .github/workflows/dbt_run_adhoc.yml create mode 100644 .github/workflows/dbt_run_deployment.yml create mode 100644 .github/workflows/dbt_run_dev_refresh.yml create mode 100644 .github/workflows/dbt_run_scheduled_main.yml create mode 100644 
.github/workflows/dbt_run_streamline_chainhead.yml create mode 100644 .github/workflows/dbt_run_streamline_history.yml create mode 100644 .github/workflows/dbt_test_daily.yml create mode 100644 .github/workflows/dbt_test_intraday.yml create mode 100644 .github/workflows/dbt_test_monthly.yml create mode 100644 .gitignore create mode 100644 README.md create mode 100644 analysis/.gitkeep create mode 100644 data/.gitkeep create mode 100644 data/github_actions__workflows.csv create mode 100644 dbt_project.yml create mode 100644 macros/custom_naming_macros.sql create mode 100644 macros/dbt/get_merge_sql.sql create mode 100644 macros/dbt/get_tmp_relation_type.sql create mode 100644 makefile create mode 100644 models/__overview__.md create mode 100644 models/github_actions/github_actions__current_task_status.sql create mode 100644 models/github_actions/github_actions__task_history.sql create mode 100644 models/github_actions/github_actions__task_performance.sql create mode 100644 models/github_actions/github_actions__task_schedule.sql create mode 100644 models/github_actions/github_actions__tasks.sql create mode 100644 models/sources.yml create mode 100644 models/testnet/core/bronze/streamline/bronze_testnet__blocks.sql create mode 100644 models/testnet/core/bronze/streamline/bronze_testnet__blocks_fr.sql create mode 100644 models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks.sql create mode 100644 models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks_fr.sql create mode 100644 models/testnet/core/bronze/streamline/bronze_testnet__receipts.sql create mode 100644 models/testnet/core/bronze/streamline/bronze_testnet__receipts_fr.sql create mode 100644 models/testnet/core/bronze/streamline/bronze_testnet__traces.sql create mode 100644 models/testnet/core/bronze/streamline/bronze_testnet__traces_fr.sql create mode 100644 models/testnet/core/bronze/streamline/bronze_testnet__transactions.sql create mode 100644 
models/testnet/core/bronze/streamline/bronze_testnet__transactions_fr.sql create mode 100644 models/testnet/core/bronze/token_reads/bronze_api_testnet__token_reads.sql create mode 100644 models/testnet/core/bronze/token_reads/bronze_api_testnet__token_reads.yml create mode 100644 models/testnet/core/gold/testnet__dim_contracts.sql create mode 100644 models/testnet/core/gold/testnet__dim_contracts.yml create mode 100644 models/testnet/core/gold/testnet__fact_blocks.sql create mode 100644 models/testnet/core/gold/testnet__fact_blocks.yml create mode 100644 models/testnet/core/gold/testnet__fact_event_logs.sql create mode 100644 models/testnet/core/gold/testnet__fact_event_logs.yml create mode 100644 models/testnet/core/gold/testnet__fact_traces.sql create mode 100644 models/testnet/core/gold/testnet__fact_traces.yml create mode 100644 models/testnet/core/gold/testnet__fact_transactions.sql create mode 100644 models/testnet/core/gold/testnet__fact_transactions.yml create mode 100644 models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_full.sql create mode 100644 models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_full.yml create mode 100644 models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_recent.sql create mode 100644 models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_recent.yml create mode 100644 models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_full.sql create mode 100644 models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_full.yml create mode 100644 models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_recent.sql create mode 100644 models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_recent.yml create mode 100644 models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_full.sql create mode 100644 models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_full.yml create mode 
100644 models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_recent.sql create mode 100644 models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_recent.yml create mode 100644 models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_full.sql create mode 100644 models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_full.yml create mode 100644 models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_recent.sql create mode 100644 models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_recent.yml create mode 100644 models/testnet/core/silver/silver_testnet__blocks.sql create mode 100644 models/testnet/core/silver/silver_testnet__contracts.sql create mode 100644 models/testnet/core/silver/silver_testnet__created_contracts.sql create mode 100644 models/testnet/core/silver/silver_testnet__receipts.sql create mode 100644 models/testnet/core/silver/silver_testnet__relevant_contracts.sql create mode 100644 models/testnet/core/silver/silver_testnet__traces.sql create mode 100644 models/testnet/core/silver/silver_testnet__transactions.sql create mode 100644 models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_full.sql create mode 100644 models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_full.yml create mode 100644 models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_recent.sql create mode 100644 models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_recent.yml create mode 100644 models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_full.sql create mode 100644 models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_full.yml create mode 100644 models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_recent.sql create mode 100644 models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_recent.yml create mode 100644 
models/testnet/core/silver/tests/traces/test_silver_testnet__traces_full.sql create mode 100644 models/testnet/core/silver/tests/traces/test_silver_testnet__traces_full.yml create mode 100644 models/testnet/core/silver/tests/traces/test_silver_testnet__traces_recent.sql create mode 100644 models/testnet/core/silver/tests/traces/test_silver_testnet__traces_recent.yml create mode 100644 models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_full.sql create mode 100644 models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_full.yml create mode 100644 models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_recent.sql create mode 100644 models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_recent.yml create mode 100644 models/testnet/core/streamline/complete/streamline__testnet_blocks_complete.sql create mode 100644 models/testnet/core/streamline/complete/streamline__testnet_receipts_complete.sql create mode 100644 models/testnet/core/streamline/complete/streamline__testnet_traces_complete.sql create mode 100644 models/testnet/core/streamline/complete/streamline__testnet_transactions_complete.sql create mode 100644 models/testnet/core/streamline/history/streamline__testnet_blocks_transactions_history.sql create mode 100644 models/testnet/core/streamline/history/streamline__testnet_receipts_history.sql create mode 100644 models/testnet/core/streamline/history/streamline__testnet_traces_history.sql create mode 100644 models/testnet/core/streamline/realtime/streamline__testnet_blocks_transactions_realtime.sql create mode 100644 models/testnet/core/streamline/realtime/streamline__testnet_receipts_realtime.sql create mode 100644 models/testnet/core/streamline/realtime/streamline__testnet_traces_realtime.sql create mode 100644 models/testnet/core/streamline/streamline__get_testnet_chainhead.sql create mode 100644 
models/testnet/core/streamline/streamline__get_testnet_chainhead.yml create mode 100644 models/testnet/core/streamline/streamline__testnet_blocks.sql create mode 100644 models/testnet/utils/_testnet_block_lookback.sql create mode 100644 models/testnet/utils/utils__number_sequence.sql create mode 100644 packages.yml create mode 100644 profiles.yml create mode 100644 requirements.txt create mode 100644 selectors.yml create mode 100644 snapshots/.gitkeep diff --git a/.github/workflows/dbt_deploy_new_workflows.yml b/.github/workflows/dbt_deploy_new_workflows.yml new file mode 100644 index 0000000..6c44671 --- /dev/null +++ b/.github/workflows/dbt_deploy_new_workflows.yml @@ -0,0 +1,45 @@ +name: dbt_deploy_new_workflows +run-name: dbt_deploy_new_workflows + +on: + workflow_dispatch: + branches: + - "main" + +env: + DBT_PROFILES_DIR: ./ + + ACCOUNT: "${{ vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ vars.WAREHOUSE }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ github.workflow }} + +jobs: + run_dbt_jobs: + runs-on: ubuntu-latest + environment: + name: workflow_test + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + + - name: Deploy New Github Actions + run: | + make deploy_new_github_action DBT_TARGET=prod \ No newline at end of file diff --git a/.github/workflows/dbt_docs_update.yml b/.github/workflows/dbt_docs_update.yml new file mode 100644 index 0000000..d08538e --- /dev/null +++ b/.github/workflows/dbt_docs_update.yml @@ -0,0 +1,76 @@ +name: docs_update + +on: + push: + branches: + - "main" + +env: + USE_VARS: "${{ vars.USE_VARS }}" + DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}" + DBT_VERSION: "${{ vars.DBT_VERSION }}" + ACCOUNT: "${{ 
vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ vars.WAREHOUSE }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ github.workflow }} + +jobs: + run_dbt_jobs: + runs-on: ubuntu-latest + environment: + name: workflow_prod + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + + - name: refresh ddl for datashare + run: | + cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi; + + - name: checkout docs branch + run: | + git checkout -B docs origin/main + + - name: generate dbt docs + run: dbt docs generate -t prod + + - name: move files to docs directory + run: | + mkdir -p ./docs + cp target/{catalog.json,manifest.json,index.html} docs/ + + - name: clean up target directory + run: dbt clean + + - name: check for changes + run: git status + + - name: stage changed files + run: git add . 
+ + - name: commit changed files + run: | + git config user.email "abc@xyz" + git config user.name "github-actions" + git commit -am "Auto-update docs" + + - name: push changes to docs + run: | + git push -f --set-upstream origin docs \ No newline at end of file diff --git a/.github/workflows/dbt_integration_test.yml b/.github/workflows/dbt_integration_test.yml new file mode 100644 index 0000000..1646d61 --- /dev/null +++ b/.github/workflows/dbt_integration_test.yml @@ -0,0 +1,17 @@ +name: dbt_run_integration_test +run-name: ${{ github.event.inputs.branch }} + +on: + workflow_dispatch: + +concurrency: ${{ github.workflow }} + +jobs: + called_workflow_template: + uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main + with: + command: > + dbt test --selector 'integration_tests' + environment: ${{ github.ref == 'refs/heads/main' && 'workflow_prod' || 'workflow_dev' }} + warehouse: ${{ vars.WAREHOUSE }} + secrets: inherit \ No newline at end of file diff --git a/.github/workflows/dbt_run_adhoc.yml b/.github/workflows/dbt_run_adhoc.yml new file mode 100644 index 0000000..4fca74c --- /dev/null +++ b/.github/workflows/dbt_run_adhoc.yml @@ -0,0 +1,66 @@ +name: dbt_run_adhoc +run-name: ${{ inputs.dbt_command }} + +on: + workflow_dispatch: + branches: + - "main" + inputs: + environment: + type: choice + description: DBT Run Environment + required: true + options: + - dev + - prod + default: dev + warehouse: + type: choice + description: Snowflake warehouse + required: true + options: + - DBT + - DBT_CLOUD + - DBT_EMERGENCY + default: DBT + dbt_command: + type: string + description: 'DBT Run Command' + required: true + +env: + DBT_PROFILES_DIR: ./ + + ACCOUNT: "${{ vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ inputs.warehouse }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ 
github.workflow }} + +jobs: + run_dbt_jobs: + runs-on: ubuntu-latest + environment: + name: workflow_${{ inputs.environment }} + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + - name: Run DBT Jobs + run: | + ${{ inputs.dbt_command }} \ No newline at end of file diff --git a/.github/workflows/dbt_run_deployment.yml b/.github/workflows/dbt_run_deployment.yml new file mode 100644 index 0000000..d0735d8 --- /dev/null +++ b/.github/workflows/dbt_run_deployment.yml @@ -0,0 +1,67 @@ +name: dbt_run_deployment +run-name: ${{ inputs.dbt_command }} + +on: + workflow_dispatch: + branches: + - "main" + inputs: + warehouse: + type: choice + description: Snowflake warehouse + required: true + options: + - DBT + - DBT_CLOUD + - DBT_EMERGENCY + default: DBT + dbt_command: + type: string + description: 'DBT Run Command' + required: true + +env: + DBT_PROFILES_DIR: ./ + DBT_VERSION: "${{ vars.DBT_VERSION }}" + ACCOUNT: "${{ vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ inputs.WAREHOUSE }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ github.workflow }} + +jobs: + dbt: + runs-on: ubuntu-latest + environment: + name: workflow_prod + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + - name: Run DBT Jobs + run: | + ${{ inputs.dbt_command }} + - name: Run datashare model + run: | + cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi; + dbt run-operation run_query --args "{sql: call 
admin.datashare.sp_grant_share_permissions('${{ env.DATABASE }}')}" + - name: Store logs + uses: actions/upload-artifact@v3 + with: + name: dbt-logs + path: | + logs + target \ No newline at end of file diff --git a/.github/workflows/dbt_run_dev_refresh.yml b/.github/workflows/dbt_run_dev_refresh.yml new file mode 100644 index 0000000..b7198d3 --- /dev/null +++ b/.github/workflows/dbt_run_dev_refresh.yml @@ -0,0 +1,68 @@ +name: dbt_run_dev_refresh +run-name: dbt_run_dev_refresh + +on: + workflow_dispatch: + branches: + - "main" + +env: + DBT_PROFILES_DIR: ./ + + ACCOUNT: "${{ vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ vars.WAREHOUSE }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ github.workflow }} + +jobs: + run_dbt_jobs_refresh: + runs-on: ubuntu-latest + environment: + name: workflow_prod + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + - name: Run Dev Refresh + run: | + dbt run-operation fsc_evm.run_sp_create_prod_clone + + run_dbt_jobs_udfs: + runs-on: ubuntu-latest + needs: run_dbt_jobs_refresh + environment: + name: workflow_dev + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + + - name: Run Recreate UDFs + run: | + dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev + dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev \ No newline at end of file diff --git a/.github/workflows/dbt_run_scheduled_main.yml b/.github/workflows/dbt_run_scheduled_main.yml new file mode 100644 index 
0000000..8efb557 --- /dev/null +++ b/.github/workflows/dbt_run_scheduled_main.yml @@ -0,0 +1,45 @@ +name: dbt_run_scheduled_main +run-name: dbt_run_scheduled_main + +on: + workflow_dispatch: + branches: + - "main" + +env: + DBT_PROFILES_DIR: ./ + + ACCOUNT: "${{ vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ vars.WAREHOUSE }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ github.workflow }} + +jobs: + run_dbt_jobs: + runs-on: ubuntu-latest + environment: + name: workflow_prod + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + + - name: Run Main Models + run: | + dbt run -m "rise_models,tag:silver_testnet" "rise_models,tag:gold_testnet" \ No newline at end of file diff --git a/.github/workflows/dbt_run_streamline_chainhead.yml b/.github/workflows/dbt_run_streamline_chainhead.yml new file mode 100644 index 0000000..9bd89a5 --- /dev/null +++ b/.github/workflows/dbt_run_streamline_chainhead.yml @@ -0,0 +1,49 @@ +name: dbt_run_streamline_chainhead +run-name: dbt_run_streamline_chainhead + +on: + workflow_dispatch: + branches: + - "main" + +env: + DBT_PROFILES_DIR: ./ + + ACCOUNT: "${{ vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ vars.WAREHOUSE }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ github.workflow }} + +jobs: + run_dbt_jobs: + runs-on: ubuntu-latest + environment: + name: workflow_prod + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt 
+ dbt deps + + - name: Run Chainhead Models + run: | + dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "rise_models,tag:streamline_testnet_complete" "rise_models,tag:streamline_testnet_realtime" + + - name: Run Chainhead Tests + run: | + dbt test -m "rise_models,tag:chainhead" \ No newline at end of file diff --git a/.github/workflows/dbt_run_streamline_history.yml b/.github/workflows/dbt_run_streamline_history.yml new file mode 100644 index 0000000..c29f99a --- /dev/null +++ b/.github/workflows/dbt_run_streamline_history.yml @@ -0,0 +1,45 @@ +name: dbt_run_streamline_history +run-name: dbt_run_streamline_history + +on: + workflow_dispatch: + branches: + - "main" + +env: + DBT_PROFILES_DIR: ./ + + ACCOUNT: "${{ vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ vars.WAREHOUSE }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ github.workflow }} + +jobs: + run_dbt_jobs: + runs-on: ubuntu-latest + environment: + name: workflow_prod + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + + - name: Run History Models + run: | + dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "rise_models,tag:streamline_testnet_complete" "rise_models,tag:streamline_testnet_history" \ No newline at end of file diff --git a/.github/workflows/dbt_test_daily.yml b/.github/workflows/dbt_test_daily.yml new file mode 100644 index 0000000..c52ef57 --- /dev/null +++ b/.github/workflows/dbt_test_daily.yml @@ -0,0 +1,49 @@ +name: dbt_test_daily +run-name: dbt_test_daily + +on: + workflow_dispatch: + branches: + - "main" + +env: + DBT_PROFILES_DIR: ./ + + ACCOUNT: "${{ vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + 
REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ vars.WAREHOUSE }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ github.workflow }} + +jobs: + run_dbt_jobs: + runs-on: ubuntu-latest + environment: + name: workflow_test + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + + - name: Build Daily Testing Views + run: | + dbt run -m "fsc_evm,tag:daily_test" + + - name: Run Daily Tests + run: | + dbt test -m "fsc_evm,tag:daily_test" \ No newline at end of file diff --git a/.github/workflows/dbt_test_intraday.yml b/.github/workflows/dbt_test_intraday.yml new file mode 100644 index 0000000..2aeea57 --- /dev/null +++ b/.github/workflows/dbt_test_intraday.yml @@ -0,0 +1,49 @@ +name: dbt_test_intraday +run-name: dbt_test_intraday + +on: + workflow_dispatch: + branches: + - "main" + +env: + DBT_PROFILES_DIR: ./ + + ACCOUNT: "${{ vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ vars.WAREHOUSE }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ github.workflow }} + +jobs: + run_dbt_jobs: + runs-on: ubuntu-latest + environment: + name: workflow_test + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + + - name: Build Recent Testing Views + run: | + dbt run -m "fsc_evm,tag:recent_test" + + - name: Run Recent Tests + run: | + dbt test -m "fsc_evm,tag:recent_test" \ No newline at end of file diff --git a/.github/workflows/dbt_test_monthly.yml b/.github/workflows/dbt_test_monthly.yml new file mode 100644 index 0000000..b32e830 --- /dev/null +++ 
b/.github/workflows/dbt_test_monthly.yml @@ -0,0 +1,49 @@ +name: dbt_test_monthly +run-name: dbt_test_monthly + +on: + workflow_dispatch: + branches: + - "main" + +env: + DBT_PROFILES_DIR: ./ + + ACCOUNT: "${{ vars.ACCOUNT }}" + ROLE: "${{ vars.ROLE }}" + USER: "${{ vars.USER }}" + PASSWORD: "${{ secrets.PASSWORD }}" + REGION: "${{ vars.REGION }}" + DATABASE: "${{ vars.DATABASE }}" + WAREHOUSE: "${{ vars.WAREHOUSE }}" + SCHEMA: "${{ vars.SCHEMA }}" + +concurrency: + group: ${{ github.workflow }} + +jobs: + run_dbt_jobs: + runs-on: ubuntu-latest + environment: + name: workflow_test + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + + - name: install dependencies + run: | + pip install -r requirements.txt + dbt deps + + - name: Build Full Testing Views + run: | + dbt run -m "fsc_evm,tag:full_test" + + - name: Run Full Tests + run: | + dbt test -m "fsc_evm,tag:full_test" \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2e8fe84 --- /dev/null +++ b/.gitignore @@ -0,0 +1,20 @@ + +target/ +dbt_modules/ +# newer versions of dbt use this directory instead of dbt_modules for test dependencies +dbt_packages/ +logs/ + +.venv/ +.python-version + +# Visual Studio Code files +*/.vscode +*.code-workspace +.history/ +**/.DS_Store +.vscode/ +.env +dbt-env/ + +package-lock.yml \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..e2eacdd --- /dev/null +++ b/README.md @@ -0,0 +1,145 @@ +## Repo Set Up +1. Create a new repository from the [evm-models-template](https://github.com/FlipsideCrypto/evm-models-template) +2. Update all references to `rise` to the new chain name, in lowercase by using find and replace +3. Update the fsc-evm package version in `packages.yml` to the latest version +4. 
Set up the rest of the dbt project, where applicable, including but not limited to: + - `dbt_project.yml` (enable/disable packages, vars, etc.) + - `.github/workflows` (update tags, etc.) + - `github_actions__workflows.csv` (update schedule, workflows, etc.) + - `overview.md` (update `rise`, table references, docs etc.) + - `sources.yml` (update schemas, tables etc.) + - `requirements.txt` (update dependencies) + - other files where applicable + +## Profile Set Up + +#### Use the following within profiles.yml +---- + +```yml +: -- replace / with the profile or name from, remove this comment in your yml + target: dev + outputs: + dev: + type: snowflake + account: + role: INTERNAL_DEV + user: + authenticator: externalbrowser + region: us-east-1 + database: _DEV + warehouse: DBT + schema: silver + threads: 4 + client_session_keep_alive: False + query_tag: dbt__dev + + prod: + type: snowflake + account: + role: DBT_CLOUD_ + user: + authenticator: externalbrowser + region: us-east-1 + database: + warehouse: DBT_CLOUD_ + schema: silver + threads: 4 + client_session_keep_alive: False + query_tag: dbt__dev +``` + +### Common DBT Run Variables + +The following variables can be used to control various aspects of the dbt run. Use them with the `--vars` flag when running dbt commands. + +| Variable | Description | Example Usage | +|----------|-------------|---------------| +| `UPDATE_UDFS_AND_SPS` | Update User Defined Functions and Stored Procedures. By default, this is set to False | `--vars '{"UPDATE_UDFS_AND_SPS":true}'` | +| `STREAMLINE_INVOKE_STREAMS` | Invoke Streamline processes. By default, this is set to False | `--vars '{"STREAMLINE_INVOKE_STREAMS":true}'` | +| `STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES` | Use development environment for external tables. By default, this is set to False | `--vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}'` | +| `HEAL_CURATED_MODEL` | Heal specific curated models. By default, this is set to an empty array []. See more below. 
| `--vars '{"HEAL_CURATED_MODEL":["axelar","across","celer_cbridge"]}'` | +| `UPDATE_SNOWFLAKE_TAGS` | Control updating of Snowflake tags. By default, this is set to False | `--vars '{"UPDATE_SNOWFLAKE_TAGS":false}'` | +| `START_GHA_TASKS` | Start GitHub Actions tasks. By default, this is set to False | `--vars '{"START_GHA_TASKS":true}'` | + +#### Example Commands + +1. Update UDFs and SPs: + ``` + dbt run --vars '{"UPDATE_UDFS_AND_SPS":true}' -m ... + ``` + +2. Invoke Streamline and use dev for external tables: + ``` + dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":true,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -m ... + ``` + +3. Heal specific curated models: + ``` + dbt run --vars '{"HEAL_CURATED_MODEL":["axelar","across","celer_cbridge"]}' -m ... + ``` + +4. Update Snowflake tags for a specific model: + ``` + dbt run --vars '{"UPDATE_SNOWFLAKE_TAGS":true}' -s models/silver/utilities/silver__number_sequence.sql + ``` + +5. Start GHA tasks: + ``` + dbt seed -s github_actions__workflows && dbt run -m models/github_actions --full-refresh && dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}' + ``` + +6. Using two or more variables: + ``` + dbt run --vars '{"UPDATE_UDFS_AND_SPS":true,"STREAMLINE_INVOKE_STREAMS":true,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -m ... + ``` + +> Note: Replace `-m ...` with appropriate model selections or tags as needed for your project structure. + +## FSC_EVM + +`fsc_evm` is a collection of macros, models, and other resources that are used to build the Flipside Crypto EVM models. + +For more information on the `fsc_evm` package, see the [FSC_EVMWiki](https://github.com/FlipsideCrypto/fsc-evm/wiki). + +## Applying Model Tags + +### Database / Schema level tags + +Database and schema tags are applied via the `fsc_evm.add_database_or_schema_tags` macro. These tags are inherited by their downstream objects. To add/modify tags call the appropriate tag set function within the macro. 
+ +``` +{{ fsc_evm.set_database_tag_value('SOME_DATABASE_TAG_KEY','SOME_DATABASE_TAG_VALUE') }} +{{ fsc_evm.set_schema_tag_value('SOME_SCHEMA_TAG_KEY','SOME_SCHEMA_TAG_VALUE') }} +``` + +### Model tags + +To add/update a model's snowflake tags, add/modify the `meta` model property under `config`. Only table level tags are supported at this time via DBT. + +{% raw %} +{{ config( + ..., + meta={ + 'database_tags':{ + 'table': { + 'PURPOSE': 'SOME_PURPOSE' + } + } + }, + ... +) }} +{% endraw %} + +By default, model tags are pushed to Snowflake on each load. You can disable this by setting the `UPDATE_SNOWFLAKE_TAGS` project variable to `False` during a run. + +``` +dbt run --vars '{"UPDATE_SNOWFLAKE_TAGS":False}' -s models/silver/utilities/silver__number_sequence.sql +``` + +### Querying for existing tags on a model in snowflake + +``` +select * +from table(.information_schema.tag_references('.core.fact_blocks', 'table')); +``` \ No newline at end of file diff --git a/analysis/.gitkeep b/analysis/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/data/.gitkeep b/data/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/data/github_actions__workflows.csv b/data/github_actions__workflows.csv new file mode 100644 index 0000000..758e51f --- /dev/null +++ b/data/github_actions__workflows.csv @@ -0,0 +1,4 @@ +workflow_name,workflow_schedule +dbt_run_streamline_chainhead,"6,36 * * * *" +dbt_run_dev_refresh,"8 5 * * 1" +dbt_run_scheduled_main,"51 * * * *" \ No newline at end of file diff --git a/dbt_project.yml b/dbt_project.yml new file mode 100644 index 0000000..549b6e1 --- /dev/null +++ b/dbt_project.yml @@ -0,0 +1,122 @@ +# Name your project! Project names should contain only lowercase characters +# and underscores. 
A good package name should reflect your organization's +# name or the intended use of these models +name: "rise_models" # replace with the name of the chain +version: "1.0.0" +config-version: 2 + +# This setting configures which "profile" dbt uses for this project. +profile: "rise" # replace with the name of the chain + +# These configurations specify where dbt should look for different types of files. +# The `source-paths` config, for example, states that models in this project can be +# found in the "models/" directory. You probably won't need to change these! +model-paths: ["models"] +analysis-paths: ["analysis"] +test-paths: ["tests"] +seed-paths: ["data"] +macro-paths: ["macros"] +snapshot-paths: ["snapshots"] +docs-paths: ["dbt_packages/fsc_evm/doc_descriptions", "models/doc_descriptions", "models"] + +target-path: "target" # directory which will store compiled SQL files +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_modules" + - "dbt_packages" + +tests: + rise_models: # replace with the name of the chain + +store_failures: true # all tests + +on-run-start: + - "{{ fsc_evm.create_sps() }}" + - "{{ fsc_evm.create_udfs() }}" + +on-run-end: + - '{{ fsc_evm.apply_meta_as_tags(results) }}' + +dispatch: + - macro_namespace: dbt + search_order: + - rise-models + - dbt_snowflake_query_tags + - dbt + +query-comment: + comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}' + append: true # Snowflake removes prefixed comments. 
+ +# Configuring models +# Full documentation: https://docs.getdbt.com/docs/configuring-models + +models: + rise_models: # replace with the name of the chain + +copy_grants: true + +persist_docs: + relation: true + columns: true + +on_schema_change: "append_new_columns" + livequery_models: + +enabled: true # Keep livequery models enabled since you need them + +materialized: ephemeral + fsc_evm: + +enabled: false # keep fsc_evm package disabled + +copy_grants: true + +persist_docs: + relation: true + columns: true + +on_schema_change: "append_new_columns" + +vars: + "dbt_date:time_zone": GMT + STREAMLINE_INVOKE_STREAMS: False + STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False + UPDATE_UDFS_AND_SPS: False + UPDATE_SNOWFLAKE_TAGS: True + OBSERV_FULL_TEST: False + WAIT: 0 + HEAL_MODEL: False + HEAL_MODELS: [] + START_GHA_TASKS: False + +#### STREAMLINE 2.0 BEGIN #### + + API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}' + EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}' + ROLES: | + ["INTERNAL_DEV"] + + config: + # The keys correspond to dbt profiles and are case sensitive + dev: + API_INTEGRATION: AWS_RISE_API_STG_V2 + EXTERNAL_FUNCTION_URI: 5o9bonenwi.execute-api.us-east-1.amazonaws.com/stg/ + ROLES: + - AWS_LAMBDA_RISE_API # replace with the name of the chain + - INTERNAL_DEV + + # prod: + # API_INTEGRATION: AWS_RISE_API_PROD_V2 + # EXTERNAL_FUNCTION_URI: .execute-api.us-east-1.amazonaws.com/prod/ + # ROLES: + # - AWS_LAMBDA_RISE_API # replace with the name of the chain + # - INTERNAL_DEV + # - DBT_CLOUD_RISE # replace with the name of the chain + +#### STREAMLINE 2.0 END #### + +#### FSC_EVM BEGIN #### + + ### GLOBAL VARIABLES BEGIN ### + + ## REQUIRED + GLOBAL_PROD_DB_NAME: "rise" + GLOBAL_NODE_SECRET_PATH: "insert_vault_path" + GLOBAL_BLOCKS_PER_HOUR: 3600 + 
+ ### GLOBAL VARIABLES END ### + +# Please visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables + +#### FSC_EVM END #### \ No newline at end of file diff --git a/macros/custom_naming_macros.sql b/macros/custom_naming_macros.sql new file mode 100644 index 0000000..0f4a72c --- /dev/null +++ b/macros/custom_naming_macros.sql @@ -0,0 +1,11 @@ +{% macro generate_schema_name(custom_schema_name=none, node=none) -%} + {% set node_name = node.name %} + {% set split_name = node_name.split('__') %} + {{ split_name[0] | trim }} +{%- endmacro %} + +{% macro generate_alias_name(custom_alias_name=none, node=none) -%} + {% set node_name = node.name %} + {% set split_name = node_name.split('__') %} + {{ split_name[1] | trim }} +{%- endmacro %} diff --git a/macros/dbt/get_merge_sql.sql b/macros/dbt/get_merge_sql.sql new file mode 100644 index 0000000..8fefc01 --- /dev/null +++ b/macros/dbt/get_merge_sql.sql @@ -0,0 +1,44 @@ +{% macro get_merge_sql( + target, + source, + unique_key, + dest_columns, + incremental_predicates + ) -%} + {% set predicate_override = "" %} + {% if incremental_predicates [0] == "dynamic_range" %} + -- run some queries to dynamically determine the min + max of this 'input_column' in the new data + {% set input_column = incremental_predicates [1] %} + {% set get_limits_query %} + SELECT + MIN( + {{ input_column }} + ) AS lower_limit, + MAX( + {{ input_column }} + ) AS upper_limit + FROM + {{ source }} + + {% endset %} + {% set limits = run_query(get_limits_query) [0] %} + {% set lower_limit, + upper_limit = limits [0], + limits [1] %} + -- use those calculated min + max values to limit 'target' scan, to only the days with new data + {% set predicate_override %} + dbt_internal_dest.{{ input_column }} BETWEEN '{{ lower_limit }}' + AND '{{ upper_limit }}' {% endset %} + {% endif %} + + {% set predicates = [predicate_override] if predicate_override else incremental_predicates %} + -- standard merge from here 
+ {% set merge_sql = dbt.get_merge_sql( + target, + source, + unique_key, + dest_columns, + predicates + ) %} + {{ return(merge_sql) }} +{% endmacro %} diff --git a/macros/dbt/get_tmp_relation_type.sql b/macros/dbt/get_tmp_relation_type.sql new file mode 100644 index 0000000..3bb7438 --- /dev/null +++ b/macros/dbt/get_tmp_relation_type.sql @@ -0,0 +1,8 @@ +{% macro dbt_snowflake_get_tmp_relation_type( + strategy, + unique_key, + language + ) %} + -- always table + {{ return('table') }} +{% endmacro %} diff --git a/makefile b/makefile new file mode 100644 index 0000000..5ca1f34 --- /dev/null +++ b/makefile @@ -0,0 +1,44 @@ +DBT_TARGET ?= dev + +deploy_streamline_functions: + rm -f package-lock.yml && dbt clean && dbt deps + dbt run -s livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET) + dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET) + +cleanup_time: + rm -f package-lock.yml && dbt clean && dbt deps + +deploy_streamline_tables: + rm -f package-lock.yml && dbt clean && dbt deps +ifeq ($(findstring dev,$(DBT_TARGET)),dev) + dbt run -m "fsc_evm,tag:bronze_external" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True}' -t $(DBT_TARGET) +else + dbt run -m "fsc_evm,tag:bronze_external" -t $(DBT_TARGET) +endif + dbt run -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" "fsc_evm,tag:utils" --full-refresh -t $(DBT_TARGET) + +deploy_streamline_requests: + rm -f package-lock.yml && dbt clean && dbt deps + dbt run -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET) + +deploy_github_actions: + dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET) + dbt seed -s github_actions__workflows -t $(DBT_TARGET) + dbt run -m models/github_actions --full-refresh -t $(DBT_TARGET) +ifeq ($(findstring 
dev,$(DBT_TARGET)),dev) + dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":False}' -t $(DBT_TARGET) +else + dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}' -t $(DBT_TARGET) +endif + +deploy_new_github_action: + dbt run-operation fsc_evm.drop_github_actions_schema -t $(DBT_TARGET) + dbt seed -s github_actions__workflows -t $(DBT_TARGET) + dbt run -m models/github_actions --full-refresh -t $(DBT_TARGET) +ifeq ($(findstring dev,$(DBT_TARGET)),dev) + dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":False}' -t $(DBT_TARGET) +else + dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}' -t $(DBT_TARGET) +endif + +.PHONY: deploy_streamline_functions deploy_streamline_tables deploy_streamline_requests deploy_github_actions cleanup_time deploy_new_github_action \ No newline at end of file diff --git a/models/__overview__.md b/models/__overview__.md new file mode 100644 index 0000000..e81676e --- /dev/null +++ b/models/__overview__.md @@ -0,0 +1,72 @@ +{% docs __overview__ %} + +# Welcome to the Flipside Crypto Monad Models Documentation! + +## **What does this documentation cover?** +The documentation included here details the design of the Core tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/rise-models) + +## **How do I use these docs?** +The easiest way to navigate this documentation is to use the Quick Links below. These links will take you to the documentation for each table, which contains a description, a list of the columns, and other helpful information. + +If you are experienced with dbt docs, feel free to use the sidebar to navigate the documentation, as well as explore the relationships between tables and the logic building them. 
+ +There is more information on how to use dbt docs in the last section of this document. + +## **Quick Links to Table Documentation** + +**Click on the links below to jump to the documentation for each schema.** + +### Testnet Tables (rise.testnet) + +**Fact Tables:** +- [fact_blocks](https://flipsidecrypto.github.io/rise-models/#!/model/model.fsc_evm.core__fact_blocks) +- [fact_event_logs](https://flipsidecrypto.github.io/rise-models/#!/model/model.fsc_evm.core__fact_event_logs) +- [fact_transactions](https://flipsidecrypto.github.io/rise-models/#!/model/model.fsc_evm.core__fact_transactions) +- [fact_traces](https://flipsidecrypto.github.io/rise-models/#!/model/model.fsc_evm.core__fact_traces) + +## **Helpful User-Defined Functions (UDFs)** + +UDFs are custom functions built by the Flipside team that can be used in your queries to make your life easier. + +Please visit [LiveQuery Functions Overview](https://flipsidecrypto.github.io/livequery-models/#!/overview) for a full list of helpful UDFs. + +## **Data Model Overview** + +The Core models are built a few different ways, but the core fact tables are built using three layers of sql models: **bronze, silver, and gold (or core).** + +- Bronze: Data is loaded in from the source as a view +- Silver: All necessary parsing, filtering, de-duping, and other transformations are done here +- Gold (or Core): Final views and tables that are available publicly + +The dimension tables are sourced from a variety of on-chain and off-chain sources. + +Convenience views (denoted ez_) are a combination of different fact and dimension tables. These views are built to make it easier to query the data. + +## **Using dbt docs** +### Navigation + +You can use the ```Project``` and ```Database``` navigation tabs on the left side of the window to explore the models in the project. + +### Database Tab + +This view shows relations (tables and views) grouped into database schemas. 
Note that ephemeral models are *not* shown in this interface, as they do not exist in the database. + +### Graph Exploration + +You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models. + +On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the Expand button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build, or are built from, the model you're exploring. + +Once expanded, you'll be able to use the ```--models``` and ```--exclude``` model selection syntax to filter the models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax). + +Note that you can also right-click on models to interactively filter and explore the graph. + + +### **More information** +- [Flipside](https://flipsidecrypto.xyz/) +- [Data Studio](https://flipsidecrypto.xyz/studio) +- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials) +- [Github](https://github.com/FlipsideCrypto/rise-models) +- [What is dbt?](https://docs.getdbt.com/docs/introduction) + +{% enddocs %} \ No newline at end of file diff --git a/models/github_actions/github_actions__current_task_status.sql b/models/github_actions/github_actions__current_task_status.sql new file mode 100644 index 0000000..577a226 --- /dev/null +++ b/models/github_actions/github_actions__current_task_status.sql @@ -0,0 +1,6 @@ +{{ config( + materialized = 'view', + tags = ['gha_tasks'] +) }} + +{{ fsc_utils.gha_task_current_status_view() }} \ No newline at end of file diff --git a/models/github_actions/github_actions__task_history.sql b/models/github_actions/github_actions__task_history.sql new file mode 100644 index 0000000..22c90a9 --- /dev/null +++ b/models/github_actions/github_actions__task_history.sql @@ -0,0 +1,6 @@ +{{ config( + materialized = 'view', + tags = ['gha_tasks'] +) }} + +{{ 
fsc_utils.gha_task_history_view() }} \ No newline at end of file diff --git a/models/github_actions/github_actions__task_performance.sql b/models/github_actions/github_actions__task_performance.sql new file mode 100644 index 0000000..c5ccf86 --- /dev/null +++ b/models/github_actions/github_actions__task_performance.sql @@ -0,0 +1,6 @@ +{{ config( + materialized = 'view', + tags = ['gha_tasks'] +) }} + +{{ fsc_utils.gha_task_performance_view() }} \ No newline at end of file diff --git a/models/github_actions/github_actions__task_schedule.sql b/models/github_actions/github_actions__task_schedule.sql new file mode 100644 index 0000000..3e710a7 --- /dev/null +++ b/models/github_actions/github_actions__task_schedule.sql @@ -0,0 +1,6 @@ +{{ config( + materialized = 'view', + tags = ['gha_tasks'] +) }} + +{{ fsc_utils.gha_task_schedule_view() }} \ No newline at end of file diff --git a/models/github_actions/github_actions__tasks.sql b/models/github_actions/github_actions__tasks.sql new file mode 100644 index 0000000..1ac65b7 --- /dev/null +++ b/models/github_actions/github_actions__tasks.sql @@ -0,0 +1,6 @@ +{{ config( + materialized = 'view', + tags = ['gha_tasks'] +) }} + +{{ fsc_utils.gha_tasks_view() }} \ No newline at end of file diff --git a/models/sources.yml b/models/sources.yml new file mode 100644 index 0000000..8d38ff0 --- /dev/null +++ b/models/sources.yml @@ -0,0 +1,45 @@ +version: 2 + +sources: + - name: github_actions + database: "{{ target.database }}" + schema: github_actions + tables: + - name: workflows + - name: bronze_streamline + database: streamline + schema: >- + {{ var('GLOBAL_PROD_DB_NAME') ~ ('_dev' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else '') }} + tables: + - name: testnet_blocks + - name: testnet_transactions + - name: testnet_receipts + - name: testnet_traces + - name: testnet_confirm_blocks + - name: testnet_decoded_logs + - name: crosschain_silver + database: "{{ 'crosschain' if target.database.upper() == 
var('GLOBAL_PROD_DB_NAME').upper() else 'crosschain_dev' }}" + schema: silver + tables: + - name: labels_combined + - name: complete_provider_asset_metadata + - name: complete_native_asset_metadata + - name: complete_native_prices + - name: complete_provider_prices + - name: complete_token_asset_metadata + - name: complete_token_prices + - name: bronze_api + database: "{{ target.database }}" + schema: bronze_api + tables: + - name: contract_abis + - name: crosschain_public + database: crosschain + schema: bronze_public + tables: + - name: user_abis + - name: silver + database: "{{ target.database }}" + schema: silver + tables: + - name: verified_abis \ No newline at end of file diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__blocks.sql b/models/testnet/core/bronze/streamline/bronze_testnet__blocks.sql new file mode 100644 index 0000000..ac61e42 --- /dev/null +++ b/models/testnet/core/bronze/streamline/bronze_testnet__blocks.sql @@ -0,0 +1,41 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +WITH meta AS ( + SELECT + job_created_time AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key + FROM + TABLE( + information_schema.external_table_file_registration_history( + start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), + table_name => '{{ source( "bronze_streamline", "testnet_blocks") }}') + ) A +) +SELECT + s.*, + b.file_name, + b._inserted_timestamp, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +FROM + {{ source( + "bronze_streamline", + "testnet_blocks" + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key +WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL \ No newline at end of file diff --git 
a/models/testnet/core/bronze/streamline/bronze_testnet__blocks_fr.sql b/models/testnet/core/bronze/streamline/bronze_testnet__blocks_fr.sql new file mode 100644 index 0000000..48faf7a --- /dev/null +++ b/models/testnet/core/bronze/streamline/bronze_testnet__blocks_fr.sql @@ -0,0 +1,42 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +WITH meta AS ( + SELECT + registered_on AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key + FROM + TABLE( + information_schema.external_table_files( + table_name => '{{ source( "bronze_streamline", "testnet_blocks") }}' + ) + ) A +) +SELECT + s.*, + b.file_name, + b._inserted_timestamp, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.value :"block_number" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +FROM + {{ source( + "bronze_streamline", + "testnet_blocks" + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key +WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL \ No newline at end of file diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks.sql b/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks.sql new file mode 100644 index 0000000..f691685 --- /dev/null +++ b/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks.sql @@ -0,0 +1,41 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +WITH meta AS ( + SELECT + job_created_time AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key + FROM + TABLE( + information_schema.external_table_file_registration_history( + start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), + table_name => '{{ source( "bronze_streamline", "testnet_confirm_blocks") }}') + ) A 
+) +SELECT + s.*, + b.file_name, + b._inserted_timestamp, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +FROM + {{ source( + "bronze_streamline", + "testnet_confirm_blocks" + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key +WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL \ No newline at end of file diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks_fr.sql b/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks_fr.sql new file mode 100644 index 0000000..6e45963 --- /dev/null +++ b/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks_fr.sql @@ -0,0 +1,42 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +WITH meta AS ( + SELECT + registered_on AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key + FROM + TABLE( + information_schema.external_table_files( + table_name => '{{ source( "bronze_streamline", "testnet_confirm_blocks") }}' + ) + ) A +) +SELECT + s.*, + b.file_name, + b._inserted_timestamp, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.value :"block_number" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +FROM + {{ source( + "bronze_streamline", + "testnet_confirm_blocks" + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key +WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL \ No newline at end of file diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__receipts.sql b/models/testnet/core/bronze/streamline/bronze_testnet__receipts.sql new file mode 100644 index 
0000000..4ad9876 --- /dev/null +++ b/models/testnet/core/bronze/streamline/bronze_testnet__receipts.sql @@ -0,0 +1,41 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +WITH meta AS ( + SELECT + job_created_time AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key + FROM + TABLE( + information_schema.external_table_file_registration_history( + start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), + table_name => '{{ source( "bronze_streamline", "testnet_receipts") }}') + ) A +) +SELECT + s.*, + b.file_name, + b._inserted_timestamp, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +FROM + {{ source( + "bronze_streamline", + "testnet_receipts" + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key +WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL \ No newline at end of file diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__receipts_fr.sql b/models/testnet/core/bronze/streamline/bronze_testnet__receipts_fr.sql new file mode 100644 index 0000000..43efefd --- /dev/null +++ b/models/testnet/core/bronze/streamline/bronze_testnet__receipts_fr.sql @@ -0,0 +1,42 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +WITH meta AS ( + SELECT + registered_on AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key + FROM + TABLE( + information_schema.external_table_files( + table_name => '{{ source( "bronze_streamline", "testnet_receipts") }}' + ) + ) A +) +SELECT + s.*, + b.file_name, + b._inserted_timestamp, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.value :"block_number" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata 
:request :"data" + ) :id :: STRING + ) :: INT AS block_number +FROM + {{ source( + "bronze_streamline", + "testnet_receipts" + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key +WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL \ No newline at end of file diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__traces.sql b/models/testnet/core/bronze/streamline/bronze_testnet__traces.sql new file mode 100644 index 0000000..921a718 --- /dev/null +++ b/models/testnet/core/bronze/streamline/bronze_testnet__traces.sql @@ -0,0 +1,41 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +WITH meta AS ( + SELECT + job_created_time AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key + FROM + TABLE( + information_schema.external_table_file_registration_history( + start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), + table_name => '{{ source( "bronze_streamline", "testnet_traces") }}') + ) A +) +SELECT + s.*, + b.file_name, + b._inserted_timestamp, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +FROM + {{ source( + "bronze_streamline", + "testnet_traces" + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key +WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL \ No newline at end of file diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__traces_fr.sql b/models/testnet/core/bronze/streamline/bronze_testnet__traces_fr.sql new file mode 100644 index 0000000..c9de29c --- /dev/null +++ b/models/testnet/core/bronze/streamline/bronze_testnet__traces_fr.sql @@ -0,0 +1,42 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +WITH meta AS 
( + SELECT + registered_on AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key + FROM + TABLE( + information_schema.external_table_files( + table_name => '{{ source( "bronze_streamline", "testnet_traces") }}' + ) + ) A +) +SELECT + s.*, + b.file_name, + b._inserted_timestamp, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.value :"block_number" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +FROM + {{ source( + "bronze_streamline", + "testnet_traces" + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key +WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL \ No newline at end of file diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__transactions.sql b/models/testnet/core/bronze/streamline/bronze_testnet__transactions.sql new file mode 100644 index 0000000..1ae7b59 --- /dev/null +++ b/models/testnet/core/bronze/streamline/bronze_testnet__transactions.sql @@ -0,0 +1,41 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +WITH meta AS ( + SELECT + job_created_time AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key + FROM + TABLE( + information_schema.external_table_file_registration_history( + start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), + table_name => '{{ source( "bronze_streamline", "testnet_transactions") }}') + ) A +) +SELECT + s.*, + b.file_name, + b._inserted_timestamp, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +FROM + {{ source( + "bronze_streamline", + "testnet_transactions" + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND 
b.partition_key = s.partition_key +WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL \ No newline at end of file diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__transactions_fr.sql b/models/testnet/core/bronze/streamline/bronze_testnet__transactions_fr.sql new file mode 100644 index 0000000..8cfa2f2 --- /dev/null +++ b/models/testnet/core/bronze/streamline/bronze_testnet__transactions_fr.sql @@ -0,0 +1,42 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +WITH meta AS ( + SELECT + registered_on AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key + FROM + TABLE( + information_schema.external_table_files( + table_name => '{{ source( "bronze_streamline", "testnet_transactions") }}' + ) + ) A +) +SELECT + s.*, + b.file_name, + b._inserted_timestamp, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.value :"block_number" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +FROM + {{ source( + "bronze_streamline", + "testnet_transactions" + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key +WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL \ No newline at end of file diff --git a/models/testnet/core/bronze/token_reads/bronze_api_testnet__token_reads.sql b/models/testnet/core/bronze/token_reads/bronze_api_testnet__token_reads.sql new file mode 100644 index 0000000..466e300 --- /dev/null +++ b/models/testnet/core/bronze/token_reads/bronze_api_testnet__token_reads.sql @@ -0,0 +1,130 @@ +{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%} +{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%} + +{{ config( + materialized = 'incremental', + unique_key = "contract_address", + full_refresh = false, + tags = 
['bronze_testnet', 'recent_test', 'contracts'] +) }} + +WITH base AS ( + + SELECT + contract_address, + latest_event_block AS latest_block + FROM + {{ ref('silver_testnet__relevant_contracts') }} + WHERE + total_event_count >= 25 + +{% if is_incremental() %} +AND contract_address NOT IN ( + SELECT + contract_address + FROM + {{ this }} +) +{% endif %} +ORDER BY + total_event_count DESC +LIMIT + 200 +), function_sigs AS ( + SELECT + '0x313ce567' AS function_sig, + 'decimals' AS function_name + UNION + SELECT + '0x06fdde03', + 'name' + UNION + SELECT + '0x95d89b41', + 'symbol' +), +all_reads AS ( + SELECT + * + FROM + base + JOIN function_sigs + ON 1 = 1 +), +ready_reads AS ( + SELECT + contract_address, + latest_block, + function_sig, + RPAD( + function_sig, + 64, + '0' + ) AS input, + utils.udf_json_rpc_call( + 'eth_call', + [{'to': contract_address, 'from': null, 'data': input}, utils.udf_int_to_hex(latest_block)], + concat_ws( + '-', + contract_address, + input, + latest_block + ) + ) AS rpc_request + FROM + all_reads +), +batch_reads AS ( + SELECT + ARRAY_AGG(rpc_request) AS batch_rpc_request + FROM + ready_reads +), +node_call AS ( + SELECT + *, + live.udf_api( + 'POST', + '{{ node_url }}', + {}, + batch_rpc_request, + '{{ node_secret_path }}' + ) AS response + FROM + batch_reads + WHERE + EXISTS ( + SELECT + 1 + FROM + ready_reads + LIMIT + 1 + ) +), flat_responses AS ( + SELECT + VALUE :id :: STRING AS call_id, + VALUE :result :: STRING AS read_result + FROM + node_call, + LATERAL FLATTEN ( + input => response :data + ) +) +SELECT + SPLIT_PART( + call_id, + '-', + 1 + ) AS contract_address, + SPLIT_PART( + call_id, + '-', + 3 + ) AS block_number, + LEFT(SPLIT_PART(call_id, '-', 2), 10) AS function_sig, + NULL AS function_input, + read_result, + SYSDATE() :: TIMESTAMP AS _inserted_timestamp +FROM + flat_responses \ No newline at end of file diff --git a/models/testnet/core/bronze/token_reads/bronze_api_testnet__token_reads.yml 
b/models/testnet/core/bronze/token_reads/bronze_api_testnet__token_reads.yml new file mode 100644 index 0000000..569ba78 --- /dev/null +++ b/models/testnet/core/bronze/token_reads/bronze_api_testnet__token_reads.yml @@ -0,0 +1,15 @@ +version: 2 +models: + - name: bronze_api_testnet__token_reads + + columns: + - name: _INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: day + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff --git a/models/testnet/core/gold/testnet__dim_contracts.sql b/models/testnet/core/gold/testnet__dim_contracts.sql new file mode 100644 index 0000000..bafb052 --- /dev/null +++ b/models/testnet/core/gold/testnet__dim_contracts.sql @@ -0,0 +1,46 @@ +{{ config( + materialized = 'incremental', + unique_key = 'address', + merge_exclude_columns = ["inserted_timestamp"], + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(address, symbol, name), SUBSTRING(address, symbol, name)", + tags = ['gold_testnet', 'contracts'] +) }} + +SELECT + LOWER(COALESCE(c0.created_contract_address,c1.contract_address)) AS address, + c1.token_symbol AS symbol, + c1.token_name AS NAME, + c1.token_decimals AS decimals, + c0.block_number AS created_block_number, + c0.block_timestamp AS created_block_timestamp, + c0.tx_hash AS created_tx_hash, + c0.creator_address AS creator_address, + c0.created_contracts_id AS dim_contracts_id, + GREATEST(COALESCE(c0.inserted_timestamp, '2000-01-01'), COALESCE(c1.inserted_timestamp, '2000-01-01')) AS inserted_timestamp, + GREATEST(COALESCE(c0.modified_timestamp, '2000-01-01'), COALESCE(c1.modified_timestamp, '2000-01-01')) AS modified_timestamp +FROM + {{ ref('silver_testnet__created_contracts') }} + c0 + FULL OUTER JOIN {{ ref('silver_testnet__contracts') }} + c1 + ON LOWER( + c0.created_contract_address + ) = LOWER( + c1.contract_address + ) +{% 
if is_incremental() %} +WHERE + c0.modified_timestamp > ( + SELECT + COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp + FROM + {{ this }} + ) + OR + c1.modified_timestamp > ( + SELECT + COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp + FROM + {{ this }} + ) +{% endif %} \ No newline at end of file diff --git a/models/testnet/core/gold/testnet__dim_contracts.yml b/models/testnet/core/gold/testnet__dim_contracts.yml new file mode 100644 index 0000000..74af12d --- /dev/null +++ b/models/testnet/core/gold/testnet__dim_contracts.yml @@ -0,0 +1,28 @@ +version: 2 +models: + - name: testnet__dim_contracts + description: '{{ doc("evm_contracts_table_doc") }}' + + columns: + - name: ADDRESS + description: '{{ doc("evm_contracts_contract_address") }}' + - name: SYMBOL + description: '{{ doc("evm_contracts_symbol") }}' + - name: NAME + description: '{{ doc("evm_contracts_name") }}' + - name: DECIMALS + description: '{{ doc("evm_decimals") }}' + - name: CREATED_BLOCK_NUMBER + description: '{{ doc("evm_contracts_block_number") }}' + - name: CREATED_BLOCK_TIMESTAMP + description: '{{ doc("evm_contracts_block_time") }}' + - name: CREATED_TX_HASH + description: '{{ doc("evm_contracts_created_tx_hash") }}' + - name: CREATOR_ADDRESS + description: '{{ doc("evm_creator_address") }}' + - name: DIM_CONTRACTS_ID + description: '{{ doc("evm_pk") }}' + - name: INSERTED_TIMESTAMP + description: '{{ doc("evm_inserted_timestamp") }}' + - name: MODIFIED_TIMESTAMP + description: '{{ doc("evm_modified_timestamp") }}' \ No newline at end of file diff --git a/models/testnet/core/gold/testnet__fact_blocks.sql b/models/testnet/core/gold/testnet__fact_blocks.sql new file mode 100644 index 0000000..604a920 --- /dev/null +++ b/models/testnet/core/gold/testnet__fact_blocks.sql @@ -0,0 +1,73 @@ +{{ config ( + materialized = "incremental", + incremental_strategy = 'delete+insert', + unique_key = "block_number", + cluster_by = 
['block_timestamp::DATE'], + tags = ['gold_testnet'] +) }} + +SELECT + block_number, + block_json :hash :: STRING AS block_hash, + utils.udf_hex_to_int( + block_json :timestamp :: STRING + ) :: TIMESTAMP AS block_timestamp, + 'testnet' AS network, + ARRAY_SIZE( + block_json :transactions + ) AS tx_count, + utils.udf_hex_to_int( + block_json :size :: STRING + ) :: bigint AS SIZE, + block_json :miner :: STRING AS miner, + block_json :mixHash :: STRING AS mix_hash, + block_json :extraData :: STRING AS extra_data, + block_json :parentHash :: STRING AS parent_hash, + utils.udf_hex_to_int( + block_json :gasUsed :: STRING + ) :: bigint AS gas_used, + utils.udf_hex_to_int( + block_json :gasLimit :: STRING + ) :: bigint AS gas_limit, + utils.udf_hex_to_int( + block_json :baseFeePerGas :: STRING + ) :: bigint AS base_fee_per_gas, + utils.udf_hex_to_int( + block_json :difficulty :: STRING + ) :: bigint AS difficulty, + utils.udf_hex_to_int( + block_json :totalDifficulty :: STRING + ) :: bigint AS total_difficulty, + block_json :sha3Uncles :: STRING AS sha3_uncles, + block_json :uncles AS uncle_blocks, + utils.udf_hex_to_int( + block_json :nonce :: STRING + ) :: bigint AS nonce, + block_json :receiptsRoot :: STRING AS receipts_root, + block_json :stateRoot :: STRING AS state_root, + block_json :transactionsRoot :: STRING AS transactions_root, + block_json :logsBloom :: STRING AS logs_bloom, + utils.udf_hex_to_int( + block_json :blobGasUsed :: STRING + ) :: bigint AS blob_gas_used, + utils.udf_hex_to_int( + block_json :excessBlobGas :: STRING + ) :: bigint AS excess_blob_gas, + block_json :parentBeaconBlockRoot :: STRING AS parent_beacon_block_root, + block_json :withdrawals AS withdrawals, + block_json :withdrawalsRoot :: STRING AS withdrawals_root, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS fact_blocks_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp +FROM + {{ ref('silver_testnet__blocks') }} +WHERE 1=1 + +{% if is_incremental() %} 
+AND modified_timestamp > ( + SELECT + COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp + FROM + {{ this }} + ) +{% endif %} \ No newline at end of file diff --git a/models/testnet/core/gold/testnet__fact_blocks.yml b/models/testnet/core/gold/testnet__fact_blocks.yml new file mode 100644 index 0000000..32e46e4 --- /dev/null +++ b/models/testnet/core/gold/testnet__fact_blocks.yml @@ -0,0 +1,66 @@ +version: 2 +models: + - name: testnet__fact_blocks + description: '{{ doc("evm_blocks_table_doc") }}' + + columns: + - name: BLOCK_NUMBER + description: '{{ doc("evm_block_number") }}' + - name: BLOCK_HASH + description: '{{ doc("evm_blocks_hash") }}' + - name: BLOCK_TIMESTAMP + description: '{{ doc("evm_block_timestamp") }}' + - name: NETWORK + description: '{{ doc("evm_network") }}' + - name: TX_COUNT + description: '{{ doc("evm_tx_count") }}' + - name: SIZE + description: '{{ doc("evm_size") }}' + - name: MINER + description: '{{ doc("evm_miner") }}' + - name: BASE_FEE_PER_GAS + description: '{{ doc("evm_base_fee_per_gas") }}' + - name: MIX_HASH + description: '{{ doc("evm_mix_hash") }}' + - name: EXTRA_DATA + description: '{{ doc("evm_extra_data") }}' + - name: PARENT_HASH + description: '{{ doc("evm_parent_hash") }}' + - name: GAS_USED + description: '{{ doc("evm_gas_used") }}' + - name: GAS_LIMIT + description: '{{ doc("evm_gas_limit") }}' + - name: DIFFICULTY + description: '{{ doc("evm_difficulty") }}' + - name: TOTAL_DIFFICULTY + description: '{{ doc("evm_total_difficulty") }}' + - name: SHA3_UNCLES + description: '{{ doc("evm_sha3_uncles") }}' + - name: UNCLE_BLOCKS + description: '{{ doc("evm_uncle_blocks") }}' + - name: NONCE + description: '{{ doc("evm_blocks_nonce") }}' + - name: RECEIPTS_ROOT + description: '{{ doc("evm_receipts_root") }}' + - name: STATE_ROOT + description: '{{ doc("evm_state_root") }}' + - name: TRANSACTIONS_ROOT + description: '{{ doc("evm_transactions_root") }}' + - name: LOGS_BLOOM + description: '{{ 
doc("evm_logs_bloom") }}' + - name: BLOB_GAS_USED + description: '{{ doc("evm_blob_gas_used") }}' + - name: EXCESS_BLOB_GAS + description: '{{ doc("evm_excess_blob_gas") }}' + - name: PARENT_BEACON_BLOCK_ROOT + description: '{{ doc("evm_parent_beacon_block_root") }}' + - name: WITHDRAWALS + description: '{{ doc("evm_withdrawals") }}' + - name: WITHDRAWALS_ROOT + description: '{{ doc("evm_withdrawals_root") }}' + - name: FACT_BLOCKS_ID + description: '{{ doc("evm_pk") }}' + - name: INSERTED_TIMESTAMP + description: '{{ doc("evm_inserted_timestamp") }}' + - name: MODIFIED_TIMESTAMP + description: '{{ doc("evm_modified_timestamp") }}' \ No newline at end of file diff --git a/models/testnet/core/gold/testnet__fact_event_logs.sql b/models/testnet/core/gold/testnet__fact_event_logs.sql new file mode 100644 index 0000000..139a8e6 --- /dev/null +++ b/models/testnet/core/gold/testnet__fact_event_logs.sql @@ -0,0 +1,217 @@ +{{ config ( + materialized = "incremental", + incremental_strategy = 'delete+insert', + unique_key = "block_number", + cluster_by = ['block_timestamp::DATE'], + tags = ['gold_testnet'] +) }} + +WITH base AS ( + + SELECT + block_number, + receipts_json :transactionHash :: STRING AS tx_hash, + receipts_json, + receipts_json :logs AS full_logs + FROM + {{ ref('silver_testnet__receipts') }} + WHERE + 1 = 1 + AND ARRAY_SIZE(receipts_json :logs) > 0 + +{% if is_incremental() %} +AND modified_timestamp > ( + SELECT + COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp + FROM + {{ this }}) + {% endif %} +), +flattened_logs AS ( + SELECT + block_number, + tx_hash, + receipts_json :from :: STRING AS origin_from_address, + receipts_json :to :: STRING AS origin_to_address, + CASE + WHEN receipts_json :status :: STRING = '0x1' THEN TRUE + WHEN receipts_json :status :: STRING = '0x0' THEN FALSE + ELSE NULL + END AS tx_succeeded, + VALUE :address :: STRING AS contract_address, + VALUE :blockHash :: STRING AS block_hash, + VALUE 
:blockNumber :: STRING AS block_number_hex, + VALUE :data :: STRING AS DATA, + utils.udf_hex_to_int( + VALUE :logIndex :: STRING + ) :: INT AS event_index, + VALUE :removed :: BOOLEAN AS event_removed, + VALUE :topics AS topics, + VALUE :transactionHash :: STRING AS transaction_hash, + utils.udf_hex_to_int( + VALUE :transactionIndex :: STRING + ) :: INT AS transaction_index + FROM + base, + LATERAL FLATTEN ( + input => full_logs + ) +), +new_logs AS ( + SELECT + l.block_number, + b.block_timestamp, + l.tx_hash, + l.transaction_index AS tx_position, + l.event_index, + l.contract_address, + l.topics, + l.topics [0] :: STRING AS topic_0, + l.topics [1] :: STRING AS topic_1, + l.topics [2] :: STRING AS topic_2, + l.topics [3] :: STRING AS topic_3, + l.data, + l.event_removed, + l.origin_from_address, + l.origin_to_address, + txs.origin_function_signature, + l.tx_succeeded + FROM + flattened_logs l + LEFT JOIN {{ ref('testnet__fact_blocks') }} + b + ON l.block_number = b.block_number + +{% if is_incremental() %} +AND b.modified_timestamp >= ( + SELECT + MAX(modified_timestamp) :: DATE - 1 + FROM + {{ this }} +) +{% endif %} +LEFT JOIN {{ ref('testnet__fact_transactions') }} +txs +ON l.tx_hash = txs.tx_hash +AND l.block_number = txs.block_number + +{% if is_incremental() %} +AND txs.modified_timestamp >= ( + SELECT + MAX(modified_timestamp) :: DATE - 1 + FROM + {{ this }} +) +{% endif %} +) + +{% if is_incremental() %}, +missing_data AS ( + SELECT + t.block_number, + b.block_timestamp AS block_timestamp_heal, + t.tx_hash, + t.tx_position, + t.event_index, + t.contract_address, + t.topics, + t.topic_0, + t.topic_1, + t.topic_2, + t.topic_3, + t.data, + t.event_removed, + t.origin_from_address, + t.origin_to_address, + txs.origin_function_signature AS origin_function_signature_heal, + t.tx_succeeded + FROM + {{ this }} + t + LEFT JOIN {{ ref('testnet__fact_transactions') }} + txs + ON t.tx_hash = txs.tx_hash + AND t.block_number = txs.block_number + LEFT JOIN {{ 
ref('testnet__fact_blocks') }} + b + ON t.block_number = b.block_number + WHERE + t.block_timestamp IS NULL + OR t.origin_function_signature IS NULL +) +{% endif %}, +all_logs AS ( + SELECT + block_number, + block_timestamp, + tx_hash, + tx_position, + event_index, + contract_address, + topics, + topic_0, + topic_1, + topic_2, + topic_3, + DATA, + event_removed, + origin_from_address, + origin_to_address, + origin_function_signature, + tx_succeeded + FROM + new_logs + +{% if is_incremental() %} +UNION ALL +SELECT + block_number, + block_timestamp_heal AS block_timestamp, + tx_hash, + tx_position, + event_index, + contract_address, + topics, + topic_0, + topic_1, + topic_2, + topic_3, + DATA, + event_removed, + origin_from_address, + origin_to_address, + origin_function_signature_heal AS origin_function_signature, + tx_succeeded +FROM + missing_data +{% endif %} +) +SELECT + block_number, + block_timestamp, + tx_hash, + tx_position, + event_index, + contract_address, + topics, + topic_0, + topic_1, + topic_2, + topic_3, + DATA, + event_removed, + origin_from_address, + origin_to_address, + origin_function_signature, + tx_succeeded, + {{ dbt_utils.generate_surrogate_key(['tx_hash','event_index']) }} AS fact_event_logs_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp +FROM + all_logs qualify ROW_NUMBER() over ( + PARTITION BY fact_event_logs_id + ORDER BY + block_number DESC, + block_timestamp DESC nulls last, + origin_function_signature DESC nulls last + ) = 1 \ No newline at end of file diff --git a/models/testnet/core/gold/testnet__fact_event_logs.yml b/models/testnet/core/gold/testnet__fact_event_logs.yml new file mode 100644 index 0000000..4b063be --- /dev/null +++ b/models/testnet/core/gold/testnet__fact_event_logs.yml @@ -0,0 +1,46 @@ +version: 2 +models: + - name: testnet__fact_event_logs + description: '{{ doc("evm_logs_table_doc") }}' + + columns: + - name: BLOCK_NUMBER + description: '{{ doc("evm_block_number") }}' + - name: 
BLOCK_TIMESTAMP + description: '{{ doc("evm_block_timestamp") }}' + - name: TX_HASH + description: '{{ doc("evm_tx_hash") }}' + - name: TX_POSITION + description: '{{ doc("evm_tx_position") }}' + - name: EVENT_INDEX + description: '{{ doc("evm_event_index") }}' + - name: CONTRACT_ADDRESS + description: '{{ doc("evm_logs_contract_address") }}' + - name: TOPICS + description: '{{ doc("evm_topics") }}' + - name: TOPIC_0 + description: '{{ doc("evm_topic_0") }}' + - name: TOPIC_1 + description: '{{ doc("evm_topic_1") }}' + - name: TOPIC_2 + description: '{{ doc("evm_topic_2") }}' + - name: TOPIC_3 + description: '{{ doc("evm_topic_3") }}' + - name: DATA + description: '{{ doc("evm_logs_data") }}' + - name: EVENT_REMOVED + description: '{{ doc("evm_event_removed") }}' + - name: ORIGIN_FROM_ADDRESS + description: '{{ doc("evm_from_address") }}' + - name: ORIGIN_TO_ADDRESS + description: '{{ doc("evm_to_address") }}' + - name: ORIGIN_FUNCTION_SIGNATURE + description: '{{ doc("evm_origin_sig") }}' + - name: TX_SUCCEEDED + description: '{{ doc("evm_tx_succeeded") }}' + - name: FACT_EVENT_LOGS_ID + description: '{{ doc("evm_pk") }}' + - name: INSERTED_TIMESTAMP + description: '{{ doc("evm_inserted_timestamp") }}' + - name: MODIFIED_TIMESTAMP + description: '{{ doc("evm_modified_timestamp") }}' \ No newline at end of file diff --git a/models/testnet/core/gold/testnet__fact_traces.sql b/models/testnet/core/gold/testnet__fact_traces.sql new file mode 100644 index 0000000..3fe8c6b --- /dev/null +++ b/models/testnet/core/gold/testnet__fact_traces.sql @@ -0,0 +1,420 @@ +{{ config ( + materialized = "incremental", + incremental_strategy = 'delete+insert', + unique_key = "block_number", + cluster_by = ['block_timestamp::DATE'], + tags = ['gold_testnet'] +) }} + +WITH silver_traces AS ( + SELECT + block_number, + tx_position, + trace_address, + parent_trace_address, + trace_address_array, + trace_json, + traces_id, + 'regular' AS source + FROM + {{ ref( + 'silver_testnet__traces' + ) 
}} + WHERE + 1 = 1 + +{% if is_incremental() %} +AND modified_timestamp > ( + SELECT + COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp + FROM + {{ this }} +) +{% endif %} + +), +sub_traces AS ( + SELECT + block_number, + tx_position, + parent_trace_address, + COUNT(*) AS sub_traces + FROM + silver_traces + GROUP BY + block_number, + tx_position, + parent_trace_address +), +trace_index_array AS ( + SELECT + block_number, + tx_position, + trace_address, + ARRAY_AGG(flat_value) AS number_array + FROM + ( + SELECT + block_number, + tx_position, + trace_address, + IFF( + VALUE :: STRING = 'ORIGIN', + -1, + VALUE :: INT + ) AS flat_value + FROM + silver_traces, + LATERAL FLATTEN ( + input => trace_address_array + ) + ) + GROUP BY + block_number, + tx_position, + trace_address +), +trace_index_sub_traces AS ( + SELECT + b.block_number, + b.tx_position, + b.trace_address, + IFNULL( + sub_traces, + 0 + ) AS sub_traces, + number_array, + ROW_NUMBER() over ( + PARTITION BY b.block_number, b.tx_position + ORDER BY + number_array ASC + ) - 1 AS trace_index, + b.trace_json, + b.traces_id, + b.source + FROM + silver_traces b + LEFT JOIN sub_traces s + ON b.block_number = s.block_number + AND b.tx_position = s.tx_position + AND b.trace_address = s.parent_trace_address + JOIN trace_index_array n + ON b.block_number = n.block_number + AND b.tx_position = n.tx_position + AND b.trace_address = n.trace_address +), +errored_traces AS ( + SELECT + block_number, + tx_position, + trace_address, + trace_json + FROM + trace_index_sub_traces + WHERE + trace_json :error :: STRING IS NOT NULL +), +error_logic AS ( + SELECT + b0.block_number, + b0.tx_position, + b0.trace_address, + b0.trace_json :error :: STRING AS error, + b1.trace_json :error :: STRING AS any_error, + b2.trace_json :error :: STRING AS origin_error + FROM + trace_index_sub_traces b0 + LEFT JOIN errored_traces b1 + ON b0.block_number = b1.block_number + AND b0.tx_position = b1.tx_position + AND 
b0.trace_address RLIKE CONCAT('^', b1.trace_address, '(_[0-9]+)*$') + LEFT JOIN errored_traces b2 + ON b0.block_number = b2.block_number + AND b0.tx_position = b2.tx_position + AND b2.trace_address = 'ORIGIN' +), +aggregated_errors AS ( + SELECT + block_number, + tx_position, + trace_address, + error, + IFF(MAX(any_error) IS NULL + AND error IS NULL + AND origin_error IS NULL, TRUE, FALSE) AS trace_succeeded + FROM + error_logic + GROUP BY + block_number, + tx_position, + trace_address, + error, + origin_error +), +json_traces AS ( + SELECT + block_number, + tx_position, + trace_address, + sub_traces, + number_array, + trace_index, + trace_succeeded, + trace_json :error :: STRING AS error_reason, + trace_json :revertReason :: STRING AS revert_reason, + trace_json :from :: STRING AS from_address, + trace_json :to :: STRING AS to_address, + IFNULL( + trace_json :value :: STRING, + '0x0' + ) AS value_hex, + IFNULL( + utils.udf_hex_to_int( + trace_json :value :: STRING + ), + '0' + ) AS value_precise_raw, + utils.udf_decimal_adjust( + value_precise_raw, + 18 + ) AS value_precise, + value_precise :: FLOAT AS VALUE, + utils.udf_hex_to_int( + trace_json :gas :: STRING + ) :: INT AS gas, + utils.udf_hex_to_int( + trace_json :gasUsed :: STRING + ) :: INT AS gas_used, + trace_json :input :: STRING AS input, + trace_json :output :: STRING AS output, + trace_json :type :: STRING AS TYPE, + traces_id + FROM + trace_index_sub_traces + JOIN aggregated_errors USING ( + block_number, + tx_position, + trace_address + ) + ), + incremental_traces AS ( + SELECT + f.block_number, + t.tx_hash, + t.block_timestamp, + t.origin_function_signature, + t.from_address AS origin_from_address, + t.to_address AS origin_to_address, + t.tx_position AS tx_position, + f.trace_index, + f.from_address AS from_address, + f.to_address AS to_address, + f.value_hex, + f.value_precise_raw, + f.value_precise, + f.value, + f.gas, + f.gas_used, + f.input, + f.output, + f.type, + f.sub_traces, + f.error_reason, 
+ f.revert_reason, + f.traces_id, + f.trace_succeeded, + f.trace_address, + t.tx_succeeded + FROM + json_traces f + LEFT OUTER JOIN {{ ref('testnet__fact_transactions') }} + t + ON f.tx_position = t.tx_position + AND f.block_number = t.block_number + +{% if is_incremental() %} +AND t.modified_timestamp >= ( + SELECT + DATEADD('hour', -24, MAX(modified_timestamp)) + FROM + {{ this }}) + {% endif %} +) + +{% if is_incremental() %}, +overflow_blocks AS ( + SELECT + DISTINCT block_number + FROM + silver_traces + WHERE + source = 'overflow' +), +heal_missing_data AS ( + SELECT + t.block_number, + txs.tx_hash, + txs.block_timestamp AS block_timestamp_heal, + txs.origin_function_signature AS origin_function_signature_heal, + txs.from_address AS origin_from_address_heal, + txs.to_address AS origin_to_address_heal, + t.tx_position, + t.trace_index, + t.from_address, + t.to_address, + t.value_hex, + t.value_precise_raw, + t.value_precise, + t.value, + t.gas, + t.gas_used, + t.input, + t.output, + t.type, + t.sub_traces, + t.error_reason, + t.revert_reason, + t.fact_traces_id AS traces_id, + t.trace_succeeded, + t.trace_address, + txs.tx_succeeded AS tx_succeeded_heal + FROM + {{ this }} + t + JOIN {{ ref('testnet__fact_transactions') }} + txs + ON t.tx_position = txs.tx_position + AND t.block_number = txs.block_number + WHERE + t.tx_position IS NULL + OR t.block_timestamp IS NULL + OR t.tx_succeeded IS NULL +) +{% endif %}, +all_traces AS ( + SELECT + block_number, + tx_hash, + block_timestamp, + origin_function_signature, + origin_from_address, + origin_to_address, + tx_position, + trace_index, + from_address, + to_address, + value_hex, + value_precise_raw, + value_precise, + VALUE, + gas, + gas_used, + input, + output, + TYPE, + sub_traces, + error_reason, + revert_reason, + trace_succeeded, + trace_address, + tx_succeeded + FROM + incremental_traces + +{% if is_incremental() %} +UNION ALL +SELECT + block_number, + tx_hash, + block_timestamp_heal AS block_timestamp, + 
origin_function_signature_heal AS origin_function_signature, + origin_from_address_heal AS origin_from_address, + origin_to_address_heal AS origin_to_address, + tx_position, + trace_index, + from_address, + to_address, + value_hex, + value_precise_raw, + value_precise, + VALUE, + gas, + gas_used, + input, + output, + TYPE, + sub_traces, + error_reason, + revert_reason, + trace_succeeded, + trace_address, + tx_succeeded_heal AS tx_succeeded +FROM + heal_missing_data +UNION ALL +SELECT + block_number, + tx_hash, + block_timestamp, + origin_function_signature, + origin_from_address, + origin_to_address, + tx_position, + trace_index, + from_address, + to_address, + value_hex, + value_precise_raw, + value_precise, + VALUE, + gas, + gas_used, + input, + output, + TYPE, + sub_traces, + error_reason, + revert_reason, + trace_succeeded, + trace_address, + tx_succeeded +FROM + {{ this }} + JOIN overflow_blocks USING (block_number) +{% endif %} +) +SELECT + block_number, + block_timestamp, + tx_hash, + tx_position, + trace_index, + from_address, + to_address, + input, + output, + TYPE, + trace_address, + sub_traces, + VALUE, + value_precise_raw, + value_precise, + value_hex, + gas, + gas_used, + origin_from_address, + origin_to_address, + origin_function_signature, + trace_succeeded, + error_reason, + revert_reason, + tx_succeeded, + {{ dbt_utils.generate_surrogate_key( + ['tx_hash', 'trace_index'] + ) }} AS fact_traces_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp +FROM + all_traces qualify(ROW_NUMBER() over(PARTITION BY block_number, tx_position, trace_index +ORDER BY + modified_timestamp DESC, block_timestamp DESC nulls last)) = 1 \ No newline at end of file diff --git a/models/testnet/core/gold/testnet__fact_traces.yml b/models/testnet/core/gold/testnet__fact_traces.yml new file mode 100644 index 0000000..4a2d1db --- /dev/null +++ b/models/testnet/core/gold/testnet__fact_traces.yml @@ -0,0 +1,62 @@ +version: 2 +models: + - name: 
testnet__fact_traces + description: '{{ doc("evm_traces_table_doc") }}' + + columns: + - name: BLOCK_NUMBER + description: '{{ doc("evm_block_number") }}' + - name: BLOCK_TIMESTAMP + description: '{{ doc("evm_block_timestamp") }}' + - name: TX_HASH + description: '{{ doc("evm_tx_hash") }}' + - name: TX_POSITION + description: '{{ doc("evm_tx_position") }}' + - name: TRACE_INDEX + description: '{{ doc("evm_trace_index") }}' + - name: FROM_ADDRESS + description: '{{ doc("evm_from_address") }}' + - name: TO_ADDRESS + description: '{{ doc("evm_to_address") }}' + - name: INPUT + description: '{{ doc("evm_traces_input") }}' + - name: OUTPUT + description: '{{ doc("evm_traces_output") }}' + - name: TYPE + description: '{{ doc("evm_traces_type") }}' + - name: TRACE_ADDRESS + description: '{{ doc("evm_trace_address") }}' + - name: SUB_TRACES + description: '{{ doc("evm_sub_traces") }}' + - name: VALUE + description: '{{ doc("evm_value") }}' + - name: VALUE_PRECISE_RAW + description: '{{ doc("evm_precise_amount_unadjusted") }}' + - name: VALUE_PRECISE + description: '{{ doc("evm_precise_amount_adjusted") }}' + - name: VALUE_HEX + description: '{{ doc("evm_value_hex") }}' + - name: GAS + description: '{{ doc("evm_traces_gas") }}' + - name: GAS_USED + description: '{{ doc("evm_traces_gas_used") }}' + - name: ORIGIN_FROM_ADDRESS + description: '{{ doc("evm_traces_from") }}' + - name: ORIGIN_TO_ADDRESS + description: '{{ doc("evm_traces_to") }}' + - name: ORIGIN_FUNCTION_SIGNATURE + description: '{{ doc("evm_origin_sig") }}' + - name: TRACE_SUCCEEDED + description: '{{ doc("evm_trace_succeeded") }}' + - name: ERROR_REASON + description: '{{ doc("evm_trace_error_reason") }}' + - name: REVERT_REASON + description: '{{ doc("evm_revert_reason") }}' + - name: TX_SUCCEEDED + description: '{{ doc("evm_tx_succeeded") }}' + - name: FACT_TRACES_ID + description: '{{ doc("evm_pk") }}' + - name: INSERTED_TIMESTAMP + description: '{{ doc("evm_inserted_timestamp") }}' + - name: 
MODIFIED_TIMESTAMP + description: '{{ doc("evm_modified_timestamp") }}' \ No newline at end of file diff --git a/models/testnet/core/gold/testnet__fact_transactions.sql b/models/testnet/core/gold/testnet__fact_transactions.sql new file mode 100644 index 0000000..7907452 --- /dev/null +++ b/models/testnet/core/gold/testnet__fact_transactions.sql @@ -0,0 +1,346 @@ +{{ config ( + materialized = "incremental", + incremental_strategy = 'delete+insert', + unique_key = "block_number", + cluster_by = ['block_timestamp::DATE'], + tags = ['gold_testnet'] +) }} + +WITH base AS ( + + SELECT + block_number, + tx_position, + transaction_json + FROM + {{ ref('silver_testnet__transactions') }} + +{% if is_incremental() %} +WHERE + modified_timestamp > ( + SELECT + COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp + FROM + {{ this }}) + {% endif %} + ), + transactions_fields AS ( + SELECT + block_number, + tx_position, + transaction_json :blockHash :: STRING AS block_hash, + transaction_json :blockNumber :: STRING AS block_number_hex, + transaction_json :from :: STRING AS from_address, + utils.udf_hex_to_int( + transaction_json :gas :: STRING + ) :: bigint AS gas_limit, + utils.udf_hex_to_int( + transaction_json :gasPrice :: STRING + ) :: bigint AS gas_price, + transaction_json :hash :: STRING AS tx_hash, + transaction_json :input :: STRING AS input_data, + LEFT( + input_data, + 10 + ) AS origin_function_signature, + utils.udf_hex_to_int( + transaction_json :nonce :: STRING + ) :: bigint AS nonce, + transaction_json :r :: STRING AS r, + transaction_json :s :: STRING AS s, + transaction_json :to :: STRING AS to_address1, + CASE + WHEN to_address1 = '' THEN NULL + ELSE to_address1 + END AS to_address, + utils.udf_hex_to_int( + transaction_json :transactionIndex :: STRING + ) :: bigint AS transaction_index, + utils.udf_hex_to_int( + transaction_json :type :: STRING + ) :: bigint AS tx_type, + utils.udf_hex_to_int( + transaction_json :v :: STRING + ) :: 
bigint AS v, + TRY_TO_NUMBER( + utils.udf_hex_to_int( + transaction_json :maxFeePerGas :: STRING + ) + ) / pow( + 10, + 9 + ) AS max_fee_per_gas, + TRY_TO_NUMBER( + utils.udf_hex_to_int( + transaction_json :maxPriorityFeePerGas :: STRING + ) + ) / pow( + 10, + 9 + ) AS max_priority_fee_per_gas, + utils.udf_hex_to_int( + transaction_json :value :: STRING + ) AS value_precise_raw, + utils.udf_decimal_adjust( + value_precise_raw, + 18 + ) AS value_precise, + value_precise :: FLOAT AS VALUE, + utils.udf_hex_to_int(transaction_json :yParity :: STRING):: bigint AS y_parity, + transaction_json :accessList AS access_list + FROM + base + ), + new_transactions AS ( + SELECT + txs.block_number, + txs.block_hash, + b.block_timestamp, + txs.tx_hash, + txs.from_address, + txs.to_address, + txs.origin_function_signature, + txs.value, + txs.value_precise_raw, + txs.value_precise, + txs.max_fee_per_gas, + txs.max_priority_fee_per_gas, + txs.y_parity, + txs.access_list, + utils.udf_decimal_adjust( + txs.gas_price * utils.udf_hex_to_int( + r.receipts_json :gasUsed :: STRING + ) :: bigint, + 18 + ) AS tx_fee_precise, + COALESCE( + tx_fee_precise :: FLOAT, + 0 + ) AS tx_fee, + CASE + WHEN r.receipts_json :status :: STRING = '0x1' THEN TRUE + WHEN r.receipts_json :status :: STRING = '0x0' THEN FALSE + ELSE NULL + END AS tx_succeeded, + txs.tx_type, + txs.nonce, + txs.tx_position, + txs.input_data, + txs.gas_price / pow( + 10, + 9 + ) AS gas_price, + utils.udf_hex_to_int( + r.receipts_json :gasUsed :: STRING + ) :: bigint AS gas_used, + txs.gas_limit, + utils.udf_hex_to_int( + r.receipts_json :cumulativeGasUsed :: STRING + ) :: bigint AS cumulative_gas_used, + utils.udf_hex_to_int( + r.receipts_json :effectiveGasPrice :: STRING + ) :: bigint AS effective_gas_price, + txs.r, + txs.s, + txs.v + FROM + transactions_fields txs + LEFT JOIN {{ ref('testnet__fact_blocks') }} + b + ON txs.block_number = b.block_number + +{% if is_incremental() %} +AND b.modified_timestamp >= ( + SELECT + 
MAX(modified_timestamp) :: DATE - 1 + FROM + {{ this }} +) +{% endif %} +LEFT JOIN {{ ref('silver_testnet__receipts') }} +r +ON txs.block_number = r.block_number +AND txs.tx_hash = r.receipts_json :transactionHash :: STRING + +{% if is_incremental() %} +AND r.modified_timestamp >= ( + SELECT + MAX(modified_timestamp) :: DATE - 1 + FROM + {{ this }} +) +{% endif %} +) + +{% if is_incremental() %}, +missing_data AS ( + SELECT + t.block_number, + b.block_timestamp AS block_timestamp_heal, + t.tx_hash, + t.from_address, + t.to_address, + t.origin_function_signature, + t.value, + t.value_precise_raw, + t.value_precise, + t.max_fee_per_gas, + t.max_priority_fee_per_gas, + t.y_parity, + t.access_list, + utils.udf_decimal_adjust( + t.gas_price * utils.udf_hex_to_int( + r.receipts_json :gasUsed :: STRING + ) :: bigint, + 9 + ) AS tx_fee_precise_heal, + COALESCE( + tx_fee_precise_heal :: FLOAT, + 0 + ) AS tx_fee_heal, + CASE + WHEN r.receipts_json :status :: STRING = '0x1' THEN TRUE + WHEN r.receipts_json :status :: STRING = '0x0' THEN FALSE + ELSE NULL + END AS tx_succeeded_heal, + t.tx_type, + t.nonce, + t.tx_position, + t.input_data, + t.gas_price, + utils.udf_hex_to_int( + r.receipts_json :gasUsed :: STRING + ) :: bigint AS gas_used_heal, + t.gas_limit, + utils.udf_hex_to_int( + r.receipts_json :cumulativeGasUsed :: STRING + ) :: bigint AS cumulative_gas_used_heal, + utils.udf_hex_to_int( + r.receipts_json :effectiveGasPrice :: STRING + ) :: bigint AS effective_gas_price_heal, + t.r, + t.s, + t.v + FROM + {{ this }} + t + LEFT JOIN {{ ref('testnet__fact_blocks') }} + b + ON t.block_number = b.block_number + LEFT JOIN {{ ref('silver_testnet__receipts') }} + r + ON t.block_number = r.block_number + AND t.tx_hash = r.receipts_json :transactionHash :: STRING + WHERE + t.block_timestamp IS NULL + OR t.tx_succeeded IS NULL +) +{% endif %}, +all_transactions AS ( + SELECT + block_number, + block_timestamp, + tx_hash, + from_address, + to_address, + origin_function_signature, + 
VALUE, + value_precise_raw, + value_precise, + max_fee_per_gas, + max_priority_fee_per_gas, + y_parity, + access_list, + tx_fee, + tx_fee_precise, + tx_succeeded, + tx_type, + nonce, + tx_position, + input_data, + gas_price, + gas_used, + gas_limit, + cumulative_gas_used, + effective_gas_price, + r, + s, + v + FROM + new_transactions + +{% if is_incremental() %} +UNION ALL +SELECT + block_number, + block_timestamp_heal AS block_timestamp, + tx_hash, + from_address, + to_address, + origin_function_signature, + VALUE, + value_precise_raw, + value_precise, + max_fee_per_gas, + max_priority_fee_per_gas, + y_parity, + access_list, + tx_fee_heal AS tx_fee, + tx_fee_precise_heal AS tx_fee_precise, + tx_succeeded_heal AS tx_succeeded, + tx_type, + nonce, + tx_position, + input_data, + gas_price, + gas_used_heal AS gas_used, + gas_limit, + cumulative_gas_used_heal AS cumulative_gas_used, + effective_gas_price_heal AS effective_gas_price, + r, + s, + v +FROM + missing_data +{% endif %} +) +SELECT + block_number, + block_timestamp, + tx_hash, + from_address, + to_address, + origin_function_signature, + VALUE, + value_precise_raw, + value_precise, + tx_fee, + tx_fee_precise, + tx_succeeded, + tx_type, + nonce, + tx_position, + input_data, + gas_price, + gas_used, + gas_limit, + cumulative_gas_used, + effective_gas_price, + max_fee_per_gas, + max_priority_fee_per_gas, + y_parity, + access_list, + r, + s, + v, + {{ dbt_utils.generate_surrogate_key(['tx_hash']) }} AS fact_transactions_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp +FROM + all_transactions qualify ROW_NUMBER() over ( + PARTITION BY fact_transactions_id + ORDER BY + block_number DESC, + block_timestamp DESC nulls last, + tx_succeeded DESC nulls last + ) = 1 \ No newline at end of file diff --git a/models/testnet/core/gold/testnet__fact_transactions.yml b/models/testnet/core/gold/testnet__fact_transactions.yml new file mode 100644 index 0000000..e7a7b53 --- /dev/null +++ 
b/models/testnet/core/gold/testnet__fact_transactions.yml @@ -0,0 +1,72 @@ +version: 2 +models: + - name: testnet__fact_transactions + description: '{{ doc("evm_tx_table_doc") }}' + + columns: + - name: BLOCK_NUMBER + description: '{{ doc("evm_block_number") }}' + - name: BLOCK_TIMESTAMP + description: '{{ doc("evm_block_timestamp") }}' + - name: TX_HASH + description: '{{ doc("evm_tx_hash") }}' + - name: FROM_ADDRESS + description: '{{ doc("evm_from_address") }}' + - name: TO_ADDRESS + description: '{{ doc("evm_to_address") }}' + - name: ORIGIN_FUNCTION_SIGNATURE + description: '{{ doc("evm_tx_origin_sig") }}' + - name: VALUE + description: '{{ doc("evm_value") }}' + - name: VALUE_PRECISE_RAW + description: '{{ doc("evm_precise_amount_unadjusted") }}' + - name: VALUE_PRECISE + description: '{{ doc("evm_precise_amount_adjusted") }}' + - name: TX_FEE + description: '{{ doc("evm_tx_fee") }}' + - name: TX_FEE_PRECISE + description: '{{ doc("evm_tx_fee_precise") }}' + - name: TX_SUCCEEDED + description: '{{ doc("evm_tx_succeeded") }}' + - name: TX_TYPE + description: '{{ doc("evm_tx_type") }}' + - name: NONCE + description: '{{ doc("evm_tx_nonce") }}' + - name: TX_POSITION + description: '{{ doc("evm_tx_position") }}' + - name: INPUT_DATA + description: '{{ doc("evm_tx_input_data") }}' + - name: GAS_PRICE + description: '{{ doc("evm_tx_gas_price") }}' + - name: GAS_USED + description: '{{ doc("evm_tx_gas_used") }}' + - name: GAS_LIMIT + description: '{{ doc("evm_tx_gas_limit") }}' + - name: CUMULATIVE_GAS_USED + description: '{{ doc("evm_cumulative_gas_used") }}' + - name: EFFECTIVE_GAS_PRICE + description: '{{ doc("evm_effective_gas_price") }}' + - name: R + description: '{{ doc("evm_r") }}' + - name: S + description: '{{ doc("evm_s") }}' + - name: V + description: '{{ doc("evm_v") }}' + - name: MAX_FEE_PER_GAS + description: '{{ doc("evm_max_fee_per_gas") }}' + - name: MAX_PRIORITY_FEE_PER_GAS + description: '{{ doc("evm_max_priority_fee_per_gas") }}' + - name: 
L1_FEE + description: '{{ doc("evm_l1_fee") }}' + - name: L1_FEE_PRECISE_RAW + description: '{{ doc("evm_l1_fee_precise_raw") }}' + - name: Y_PARITY + description: '{{ doc("evm_y_parity") }}' + - name: ACCESS_LIST + description: '{{ doc("evm_access_list") }}' + - name: FACT_TRANSACTIONS_ID + description: '{{ doc("evm_pk") }}' + - name: INSERTED_TIMESTAMP + description: '{{ doc("evm_inserted_timestamp") }}' + - name: MODIFIED_TIMESTAMP + description: '{{ doc("evm_modified_timestamp") }}' \ No newline at end of file diff --git a/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_full.sql b/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_full.sql new file mode 100644 index 0000000..4b3bca5 --- /dev/null +++ b/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_full.sql @@ -0,0 +1,9 @@ +{{ config ( + materialized = "view", + tags = ['full_test'] +) }} + +SELECT + * +FROM + {{ ref('testnet__fact_blocks') }} diff --git a/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_full.yml b/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_full.yml new file mode 100644 index 0000000..8291010 --- /dev/null +++ b/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_full.yml @@ -0,0 +1,138 @@ +version: 2 +models: + - name: test_gold_testnet__fact_blocks_full + description: "This is a view used to test all of the gold fact blocks model." 
+ tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - BLOCK_NUMBER + - fsc_utils.sequence_gaps: + column_name: BLOCK_NUMBER + where: BLOCK_TIMESTAMP < CURRENT_DATE - 1 + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: BLOCK_HASH + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: BLOCK_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - name: NETWORK + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: ^[a-zA-Z0-9_]+$ + - name: TX_COUNT + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: SIZE + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: MINER + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: EXTRA_DATA + tests: + - not_null + - name: PARENT_HASH + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: GAS_USED + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: GAS_LIMIT + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: SHA3_UNCLES + tests: + - not_null + - name: UNCLE_BLOCKS + tests: + - not_null + - name: NONCE + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: RECEIPTS_ROOT + tests: + 
- not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: STATE_ROOT + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: TRANSACTIONS_ROOT + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: LOGS_BLOOM + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: FACT_BLOCKS_ID + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_unique + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff --git a/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_recent.sql b/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_recent.sql new file mode 100644 index 0000000..f8ed01a --- /dev/null +++ b/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_recent.sql @@ -0,0 +1,16 @@ +{{ config ( + materialized = "view", + tags = ['recent_test'] +) }} + +SELECT + * +FROM + {{ ref('testnet__fact_blocks') }} +WHERE + block_number > ( + SELECT + block_number + FROM + {{ ref('_testnet_block_lookback') }} + ) diff --git a/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_recent.yml b/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_recent.yml new file mode 100644 index 0000000..8bdb032 --- /dev/null +++ 
b/models/testnet/core/gold/tests/blocks/test_gold_testnet__fact_blocks_recent.yml @@ -0,0 +1,147 @@ +version: 2 +models: + - name: test_gold_testnet__fact_blocks_recent + description: "This is a view used to test the last three days of fact blocks." + tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - BLOCK_NUMBER + - fsc_utils.sequence_gaps: + column_name: BLOCK_NUMBER + config: + severity: error + error_if: ">10" + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: BLOCK_HASH + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: BLOCK_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - name: NETWORK + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: ^[a-zA-Z0-9_]+$ + - name: TX_COUNT + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: SIZE + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: MINER + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: EXTRA_DATA + tests: + - not_null + - name: PARENT_HASH + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: GAS_USED + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: GAS_LIMIT + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: 
DIFFICULTY + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: SHA3_UNCLES + tests: + - not_null + - name: UNCLE_BLOCKS + tests: + - not_null + - name: NONCE + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: RECEIPTS_ROOT + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: STATE_ROOT + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: TRANSACTIONS_ROOT + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: LOGS_BLOOM + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: FACT_BLOCKS_ID + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_unique + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff --git a/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_full.sql b/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_full.sql new file mode 100644 index 0000000..409885f --- /dev/null +++ b/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_full.sql @@ -0,0 +1,9 @@ +{{ config ( + materialized = "view", + tags = ['full_test'] +) }} + +SELECT + * +FROM + {{ 
ref('testnet__fact_event_logs') }} \ No newline at end of file diff --git a/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_full.yml b/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_full.yml new file mode 100644 index 0000000..3bd2dc3 --- /dev/null +++ b/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_full.yml @@ -0,0 +1,102 @@ +version: 2 +models: + - name: test_gold_testnet__fact_event_logs_full + description: "This is a view used to test all of the gold fact event logs model." + tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - TX_HASH + - EVENT_INDEX + - fsc_utils.sequence_gaps: + partition_by: + - BLOCK_NUMBER + - TX_HASH + column_name: EVENT_INDEX + where: BLOCK_TIMESTAMP < CURRENT_DATE - 1 + - fsc_evm.events_match_txs: + transactions_model: ref('test_gold_testnet__fact_transactions_full') + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - name: BLOCK_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: TX_HASH + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - fsc_utils.tx_block_count: + config: + severity: error + error_if: "!=0" + - name: TX_POSITION + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: EVENT_INDEX + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: CONTRACT_ADDRESS + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: TOPICS + tests: + - 
not_null + - name: DATA + tests: + - not_null + - name: EVENT_REMOVED + tests: + - not_null + - name: ORIGIN_FROM_ADDRESS + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: ORIGIN_TO_ADDRESS + tests: + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: ORIGIN_FUNCTION_SIGNATURE + tests: + - not_null + - name: TX_SUCCEEDED + tests: + - not_null + - name: FACT_EVENT_LOGS_ID + tests: + - not_null + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 \ No newline at end of file diff --git a/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_recent.sql b/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_recent.sql new file mode 100644 index 0000000..77a840e --- /dev/null +++ b/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_recent.sql @@ -0,0 +1,16 @@ +{{ config ( + materialized = "view", + tags = ['recent_test'] +) }} + +SELECT + * +FROM + {{ ref('testnet__fact_event_logs') }} +WHERE + block_number > ( + SELECT + block_number + FROM + {{ ref('_testnet_block_lookback') }} + ) diff --git a/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_recent.yml b/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_recent.yml new file mode 100644 index 0000000..9befadf --- /dev/null +++ b/models/testnet/core/gold/tests/event_logs/test_gold_testnet__fact_event_logs_recent.yml @@ -0,0 +1,101 @@ +version: 2 +models: + - name: test_gold_testnet__fact_event_logs_recent + description: "This is a view used to test the last three days of fact event logs." 
+ tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - TX_HASH + - EVENT_INDEX + - fsc_utils.sequence_gaps: + partition_by: + - BLOCK_NUMBER + - TX_HASH + column_name: EVENT_INDEX + - fsc_evm.events_match_txs: + transactions_model: ref('test_gold_testnet__fact_transactions_recent') + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - name: BLOCK_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: TX_HASH + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - fsc_utils.tx_block_count: + config: + severity: error + error_if: "!=0" + - name: TX_POSITION + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: EVENT_INDEX + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: CONTRACT_ADDRESS + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: TOPICS + tests: + - not_null + - name: DATA + tests: + - not_null + - name: EVENT_REMOVED + tests: + - not_null + - name: ORIGIN_FROM_ADDRESS + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: ORIGIN_TO_ADDRESS + tests: + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: ORIGIN_FUNCTION_SIGNATURE + tests: + - not_null + - name: TX_SUCCEEDED + tests: + - not_null + - name: FACT_EVENT_LOGS_ID + tests: + - not_null + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + 
datepart: hour + interval: 2 + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 \ No newline at end of file diff --git a/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_full.sql b/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_full.sql new file mode 100644 index 0000000..13b477e --- /dev/null +++ b/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_full.sql @@ -0,0 +1,9 @@ +{{ config ( + materialized = "view", + tags = ['full_test'] +) }} + +SELECT + * +FROM + {{ ref('testnet__fact_traces') }} \ No newline at end of file diff --git a/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_full.yml b/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_full.yml new file mode 100644 index 0000000..ef85345 --- /dev/null +++ b/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_full.yml @@ -0,0 +1,122 @@ +version: 2 +models: + - name: test_gold_testnet__fact_traces_full + description: "This is a view used to test all of the gold fact traces model." 
+ tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - TX_HASH + - TRACE_INDEX + - fsc_utils.sequence_gaps: + partition_by: + - TX_HASH + column_name: TRACE_INDEX + where: BLOCK_TIMESTAMP < CURRENT_DATE - 1 AND TX_HASH IS NOT NULL + - fsc_evm.txs_have_traces: + transactions_model: ref('test_gold_testnet__fact_transactions_full') + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: BLOCK_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: TX_HASH + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: TX_POSITION + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: TRACE_INDEX + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: FROM_ADDRESS + tests: + - not_null: + where: TYPE <> 'SELFDESTRUCT' + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: TO_ADDRESS + tests: + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + where: TO_ADDRESS IS NOT NULL + - name: INPUT + tests: + - not_null + - name: TYPE + tests: + - not_null + - name: TRACE_ADDRESS + tests: + - not_null + - name: SUB_TRACES + tests: + - not_null + - name: VALUE + tests: + - not_null + - name: VALUE_PRECISE_RAW + tests: + - not_null + - name: VALUE_PRECISE + tests: + - not_null + - name: VALUE_HEX + tests: + - not_null + - name: GAS + tests: + - not_null + - name: GAS_USED + tests: + - not_null + - name: ORIGIN_FROM_ADDRESS + tests: + - not_null + - name: 
ORIGIN_FUNCTION_SIGNATURE + tests: + - not_null + - name: TRACE_SUCCEEDED + tests: + - not_null + - name: TX_SUCCEEDED + tests: + - not_null + - name: FACT_TRACES_ID + tests: + - not_null + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 \ No newline at end of file diff --git a/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_recent.sql b/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_recent.sql new file mode 100644 index 0000000..0739726 --- /dev/null +++ b/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_recent.sql @@ -0,0 +1,16 @@ +{{ config ( + materialized = "view", + tags = ['recent_test'] +) }} + +SELECT + * +FROM + {{ ref('testnet__fact_traces') }} +WHERE + block_number > ( + SELECT + block_number + FROM + {{ ref('_testnet_block_lookback') }} + ) diff --git a/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_recent.yml b/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_recent.yml new file mode 100644 index 0000000..c2fe28a --- /dev/null +++ b/models/testnet/core/gold/tests/traces/test_gold_testnet__fact_traces_recent.yml @@ -0,0 +1,122 @@ +version: 2 +models: + - name: test_gold_testnet__fact_traces_recent + description: "This is a view used to test the last three days of fact traces." 
+ tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - TX_HASH + - TRACE_INDEX + - fsc_utils.sequence_gaps: + partition_by: + - TX_HASH + column_name: TRACE_INDEX + where: TX_HASH IS NOT NULL + - fsc_evm.txs_have_traces: + transactions_model: ref('test_gold_testnet__fact_transactions_recent') + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: BLOCK_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: TX_HASH + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: TX_POSITION + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: TRACE_INDEX + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: FROM_ADDRESS + tests: + - not_null: + where: TYPE <> 'SELFDESTRUCT' + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: TO_ADDRESS + tests: + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + where: TO_ADDRESS IS NOT NULL + - name: INPUT + tests: + - not_null + - name: TYPE + tests: + - not_null + - name: TRACE_ADDRESS + tests: + - not_null + - name: SUB_TRACES + tests: + - not_null + - name: VALUE + tests: + - not_null + - name: VALUE_PRECISE_RAW + tests: + - not_null + - name: VALUE_PRECISE + tests: + - not_null + - name: VALUE_HEX + tests: + - not_null + - name: GAS + tests: + - not_null + - name: GAS_USED + tests: + - not_null + - name: ORIGIN_FROM_ADDRESS + tests: + - not_null + - name: ORIGIN_FUNCTION_SIGNATURE + tests: + - not_null + 
- name: TRACE_SUCCEEDED + tests: + - not_null + - name: TX_SUCCEEDED + tests: + - not_null + - name: FACT_TRACES_ID + tests: + - not_null + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 \ No newline at end of file diff --git a/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_full.sql b/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_full.sql new file mode 100644 index 0000000..90cdbec --- /dev/null +++ b/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_full.sql @@ -0,0 +1,9 @@ +{{ config ( + materialized = "view", + tags = ['full_test'] +) }} + +SELECT + * +FROM + {{ ref('testnet__fact_transactions') }} \ No newline at end of file diff --git a/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_full.yml b/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_full.yml new file mode 100644 index 0000000..f9e4a1e --- /dev/null +++ b/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_full.yml @@ -0,0 +1,125 @@ +version: 2 +models: + - name: test_gold_testnet__fact_transactions_full + description: "This is a view used to test all of the gold fact transactions model." 
+ tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - TX_HASH + - fsc_utils.sequence_gaps: + partition_by: + - BLOCK_NUMBER + column_name: TX_POSITION + where: BLOCK_TIMESTAMP < CURRENT_DATE - 1 + - fsc_evm.txs_match_blocks: + blocks_model: ref('test_gold_testnet__fact_blocks_full') + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: BLOCK_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - name: TX_HASH + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: FROM_ADDRESS + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: TO_ADDRESS + tests: + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + where: TO_ADDRESS IS NOT NULL + - name: ORIGIN_FUNCTION_SIGNATURE + tests: + - not_null + - name: VALUE + tests: + - not_null + - name: VALUE_PRECISE_RAW + tests: + - not_null + - name: VALUE_PRECISE + tests: + - not_null + - name: TX_FEE + tests: + - not_null + - name: TX_FEE_PRECISE + tests: + - not_null + - name: TX_SUCCEEDED + tests: + - not_null + - name: TX_TYPE + tests: + - not_null + - name: NONCE + tests: + - not_null + - name: TX_POSITION + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: INPUT_DATA + tests: + - not_null + - name: GAS_PRICE + tests: + - not_null + - name: GAS_USED + tests: + - not_null + - name: GAS_LIMIT + tests: + - not_null + - name: CUMULATIVE_GAS_USED + tests: + - not_null + - name: EFFECTIVE_GAS_PRICE + tests: + - not_null + - name: R + tests: + - not_null + - name: S 
+ tests: + - not_null + - name: V + tests: + - not_null + - name: FACT_TRANSACTIONS_ID + tests: + - not_null + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 \ No newline at end of file diff --git a/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_recent.sql b/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_recent.sql new file mode 100644 index 0000000..17fc7e8 --- /dev/null +++ b/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_recent.sql @@ -0,0 +1,16 @@ +{{ config ( + materialized = "view", + tags = ['recent_test'] +) }} + +SELECT + * +FROM + {{ ref('testnet__fact_transactions') }} +WHERE + block_number > ( + SELECT + block_number + FROM + {{ ref('_testnet_block_lookback') }} + ) diff --git a/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_recent.yml b/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_recent.yml new file mode 100644 index 0000000..f46afaa --- /dev/null +++ b/models/testnet/core/gold/tests/transactions/test_gold_testnet__fact_transactions_recent.yml @@ -0,0 +1,124 @@ +version: 2 +models: + - name: test_gold_testnet__fact_transactions_recent + description: "This is a view used to test the last three days of fact transactions." 
+ tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - TX_HASH + - fsc_utils.sequence_gaps: + partition_by: + - BLOCK_NUMBER + column_name: TX_POSITION + - fsc_evm.txs_match_blocks: + blocks_model: ref('test_gold_testnet__fact_blocks_recent') + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: BLOCK_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - name: TX_HASH + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: FROM_ADDRESS + tests: + - not_null + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + - name: TO_ADDRESS + tests: + - dbt_expectations.expect_column_values_to_match_regex: + regex: 0[xX][0-9a-fA-F]+ + where: TO_ADDRESS IS NOT NULL + - name: ORIGIN_FUNCTION_SIGNATURE + tests: + - not_null + - name: VALUE + tests: + - not_null + - name: VALUE_PRECISE_RAW + tests: + - not_null + - name: VALUE_PRECISE + tests: + - not_null + - name: TX_FEE + tests: + - not_null + - name: TX_FEE_PRECISE + tests: + - not_null + - name: TX_SUCCEEDED + tests: + - not_null + - name: TX_TYPE + tests: + - not_null + - name: NONCE + tests: + - not_null + - name: TX_POSITION + tests: + - not_null + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - NUMBER + - FLOAT + - name: INPUT_DATA + tests: + - not_null + - name: GAS_PRICE + tests: + - not_null + - name: GAS_USED + tests: + - not_null + - name: GAS_LIMIT + tests: + - not_null + - name: CUMULATIVE_GAS_USED + tests: + - not_null + - name: EFFECTIVE_GAS_PRICE + tests: + - not_null + - name: R + tests: + - not_null + - name: S + tests: + - not_null + - name: V + 
tests: + - not_null + - name: FACT_TRANSACTIONS_ID + tests: + - not_null + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 \ No newline at end of file diff --git a/models/testnet/core/silver/silver_testnet__blocks.sql b/models/testnet/core/silver/silver_testnet__blocks.sql new file mode 100644 index 0000000..d61214f --- /dev/null +++ b/models/testnet/core/silver/silver_testnet__blocks.sql @@ -0,0 +1,41 @@ +-- depends_on: {{ ref('bronze_testnet__blocks') }} +{{ config ( + materialized = "incremental", + incremental_strategy = 'delete+insert', + unique_key = "block_number", + cluster_by = ['modified_timestamp::DATE','partition_key'], + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", + tags = ['silver_testnet'] +) }} + +WITH bronze_blocks AS ( + SELECT + block_number, + partition_key, + DATA AS block_json, + _inserted_timestamp + FROM + {% if is_incremental() %} + {{ ref('bronze_testnet__blocks') }} + WHERE _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1900-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM {{ this }} + ) AND DATA IS NOT NULL + {% else %} + {{ ref('bronze_testnet__blocks_fr') }} + WHERE DATA IS NOT NULL + {% endif %} +) + +SELECT + block_number, + partition_key, + block_json, + _inserted_timestamp, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS blocks_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM bronze_blocks +QUALIFY ROW_NUMBER() OVER (PARTITION BY blocks_id ORDER BY _inserted_timestamp DESC) = 1 \ No newline at end of file diff --git a/models/testnet/core/silver/silver_testnet__contracts.sql b/models/testnet/core/silver/silver_testnet__contracts.sql new file mode 
100644 index 0000000..8b7968b --- /dev/null +++ b/models/testnet/core/silver/silver_testnet__contracts.sql @@ -0,0 +1,112 @@ +{{ config( + materialized = 'incremental', + unique_key = 'contract_address', + merge_exclude_columns = ["inserted_timestamp"], + tags = ['silver_testnet','contracts'] +) }} + +WITH base_metadata AS ( + + SELECT + contract_address, + block_number, + function_sig AS function_signature, + read_result AS read_output, + _inserted_timestamp + FROM + {{ ref('bronze_api_testnet__token_reads') }} + WHERE + read_result IS NOT NULL + AND read_result <> '0x' + +{% if is_incremental() %} +AND _inserted_timestamp >= ( + SELECT + COALESCE( + MAX( + _inserted_timestamp + ), + '1970-01-01' + ) + FROM + {{ this }} +) +{% endif %} +), +token_names AS ( + SELECT + contract_address, + block_number, + function_signature, + read_output, + utils.udf_hex_to_string( + SUBSTR(read_output,(64 * 2 + 3), len(read_output))) AS token_name + FROM + base_metadata + WHERE + function_signature = '0x06fdde03' + AND token_name IS NOT NULL + ), + token_symbols AS ( + SELECT + contract_address, + block_number, + function_signature, + read_output, + utils.udf_hex_to_string( + SUBSTR(read_output,(64 * 2 + 3), len(read_output))) AS token_symbol + FROM + base_metadata + WHERE + function_signature = '0x95d89b41' + AND token_symbol IS NOT NULL + ), + token_decimals AS ( + SELECT + contract_address, + CASE + WHEN read_output IS NOT NULL THEN utils.udf_hex_to_int( + read_output :: STRING + ) + ELSE NULL + END AS token_decimals, + LENGTH(token_decimals) AS dec_length + FROM + base_metadata + WHERE + function_signature = '0x313ce567' + AND read_output IS NOT NULL + AND read_output <> '0x' + ), + contracts AS ( + SELECT + contract_address, + MAX(_inserted_timestamp) AS _inserted_timestamp + FROM + base_metadata + GROUP BY + 1 + ) + SELECT + c1.contract_address :: STRING AS contract_address, + token_name, + TRY_TO_NUMBER(token_decimals) AS token_decimals, + token_symbol, + 
_inserted_timestamp, + {{ dbt_utils.generate_surrogate_key( + ['c1.contract_address'] + ) }} AS contracts_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + '{{ invocation_id }}' AS _invocation_id + FROM + contracts c1 + LEFT JOIN token_names + ON c1.contract_address = token_names.contract_address + LEFT JOIN token_symbols + ON c1.contract_address = token_symbols.contract_address + LEFT JOIN token_decimals + ON c1.contract_address = token_decimals.contract_address + AND dec_length < 3 qualify(ROW_NUMBER() over(PARTITION BY c1.contract_address + ORDER BY + _inserted_timestamp DESC)) = 1 \ No newline at end of file diff --git a/models/testnet/core/silver/silver_testnet__created_contracts.sql b/models/testnet/core/silver/silver_testnet__created_contracts.sql new file mode 100644 index 0000000..059c0b2 --- /dev/null +++ b/models/testnet/core/silver/silver_testnet__created_contracts.sql @@ -0,0 +1,44 @@ +{{ config ( + materialized = "incremental", + unique_key = "created_contract_address", + merge_exclude_columns = ["inserted_timestamp"], + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(block_timestamp, tx_hash, created_contract_address, creator_address), SUBSTRING(created_contract_address, creator_address)", + tags = ['silver_testnet','contracts'] +) }} + +SELECT + block_number, + block_timestamp, + tx_hash, + to_address AS created_contract_address, + from_address AS creator_address, + input AS created_contract_input, + inserted_timestamp AS _inserted_timestamp, + {{ dbt_utils.generate_surrogate_key( + ['to_address'] + ) }} AS created_contracts_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {{ ref('testnet__fact_traces') }} +WHERE + TYPE ILIKE 'create%' + AND to_address IS NOT NULL + AND input IS NOT NULL + AND input != '0x' + AND trace_succeeded + AND tx_succeeded + +{% if is_incremental() %} +AND inserted_timestamp >= ( + SELECT + 
MAX(inserted_timestamp) - INTERVAL '4 hours' + FROM + {{ this }} +) +{% endif %} + +qualify(ROW_NUMBER() over(PARTITION BY created_contract_address +ORDER BY + _inserted_timestamp DESC)) = 1 \ No newline at end of file diff --git a/models/testnet/core/silver/silver_testnet__receipts.sql b/models/testnet/core/silver/silver_testnet__receipts.sql new file mode 100644 index 0000000..e9cb277 --- /dev/null +++ b/models/testnet/core/silver/silver_testnet__receipts.sql @@ -0,0 +1,44 @@ +-- depends_on: {{ ref('bronze_testnet__receipts') }} +{{ config ( + materialized = "incremental", + incremental_strategy = 'delete+insert', + unique_key = "block_number", + cluster_by = ['modified_timestamp::DATE','partition_key'], + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", + tags = ['silver_testnet'] +) }} + +WITH bronze_receipts AS ( + SELECT + block_number, + partition_key, + array_index, + DATA AS receipts_json, + _inserted_timestamp + FROM + {% if is_incremental() %} + {{ ref('bronze_testnet__receipts') }} + WHERE _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1900-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM {{ this }} + ) AND DATA IS NOT NULL + {% else %} + {{ ref('bronze_testnet__receipts_fr') }} + WHERE DATA IS NOT NULL + {% endif %} +) + +SELECT + block_number, + partition_key, + array_index, + receipts_json, + _inserted_timestamp, + {{ dbt_utils.generate_surrogate_key(['block_number','array_index']) }} AS receipts_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM bronze_receipts +where array_index is not null +QUALIFY ROW_NUMBER() OVER (PARTITION BY receipts_id ORDER BY block_number DESC, _inserted_timestamp DESC) = 1 \ No newline at end of file diff --git a/models/testnet/core/silver/silver_testnet__relevant_contracts.sql b/models/testnet/core/silver/silver_testnet__relevant_contracts.sql new file mode 100644 index 0000000..805e183 
--- /dev/null +++ b/models/testnet/core/silver/silver_testnet__relevant_contracts.sql @@ -0,0 +1,134 @@ +{{ config( + materialized = 'incremental', + unique_key = "contract_address", + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(contract_address)", + tags = ['silver_testnet','contracts'] +) }} + +WITH emitted_events AS ( + + SELECT + contract_address, + COUNT(*) AS event_count, + MAX(inserted_timestamp) AS max_inserted_timestamp_logs, + MAX(block_number) AS latest_event_block + FROM + {{ ref('testnet__fact_event_logs') }} + +{% if is_incremental() %} +WHERE + inserted_timestamp > ( + SELECT + MAX(max_inserted_timestamp_logs) + FROM + {{ this }} + ) +{% endif %} +GROUP BY + contract_address +), +function_calls AS ( + SELECT + to_address AS contract_address, + COUNT(*) AS function_call_count, + MAX(inserted_timestamp) AS max_inserted_timestamp_traces, + MAX(block_number) AS latest_call_block + FROM + {{ ref('testnet__fact_traces') }} + WHERE + tx_succeeded + AND trace_succeeded + AND to_address IS NOT NULL + AND input IS NOT NULL + AND input <> '0x' + +{% if is_incremental() %} +AND inserted_timestamp > ( + SELECT + MAX(max_inserted_timestamp_traces) + FROM + {{ this }} +) +{% endif %} +GROUP BY + 1 +), +active_contracts AS ( + SELECT + contract_address + FROM + emitted_events + UNION + SELECT + contract_address + FROM + function_calls +), +previous_totals AS ( + +{% if is_incremental() %} +SELECT + contract_address, total_event_count, total_call_count, max_inserted_timestamp_logs, latest_event_block, max_inserted_timestamp_traces, latest_call_block +FROM + {{ this }} +{% else %} +SELECT + NULL AS contract_address, 0 AS total_event_count, 0 AS total_call_count, '1970-01-01 00:00:00' AS max_inserted_timestamp_logs, 0 AS latest_event_block, '1970-01-01 00:00:00' AS max_inserted_timestamp_traces, 0 AS latest_call_block +{% endif %}) +SELECT + C.contract_address, + COALESCE( + p.total_event_count, + 0 + ) + COALESCE( + e.event_count, + 0 + ) 
AS total_event_count, + COALESCE( + p.total_call_count, + 0 + ) + COALESCE( + f.function_call_count, + 0 + ) AS total_call_count, + COALESCE( + p.total_event_count, + 0 + ) + COALESCE( + e.event_count, + 0 + ) + COALESCE( + p.total_call_count, + 0 + ) + COALESCE( + f.function_call_count, + 0 + ) AS total_interaction_count, + COALESCE( + e.max_inserted_timestamp_logs, + p.max_inserted_timestamp_logs, + '1970-01-01 00:00:00' + ) AS max_inserted_timestamp_logs, + COALESCE( + f.max_inserted_timestamp_traces, + p.max_inserted_timestamp_traces, + '1970-01-01 00:00:00' + ) AS max_inserted_timestamp_traces, + COALESCE( + e.latest_event_block, + p.latest_event_block, + 0 + ) AS latest_event_block, + COALESCE( + f.latest_call_block, + p.latest_call_block, + 0 + ) AS latest_call_block +FROM + active_contracts C + LEFT JOIN emitted_events e + ON C.contract_address = e.contract_address + LEFT JOIN function_calls f + ON C.contract_address = f.contract_address + LEFT JOIN previous_totals p + ON C.contract_address = p.contract_address \ No newline at end of file diff --git a/models/testnet/core/silver/silver_testnet__traces.sql b/models/testnet/core/silver/silver_testnet__traces.sql new file mode 100644 index 0000000..60e42a7 --- /dev/null +++ b/models/testnet/core/silver/silver_testnet__traces.sql @@ -0,0 +1,136 @@ +-- depends_on: {{ ref('bronze_testnet__traces') }} + +{{ config ( + materialized = "incremental", + incremental_strategy = 'delete+insert', + unique_key = "block_number", + cluster_by = ['modified_timestamp::DATE','partition_key'], + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", + tags = ['silver_testnet'] +) }} + + WITH bronze_traces AS ( + SELECT + block_number, + partition_key, + VALUE :array_index :: INT AS tx_position, + DATA :result AS full_traces, + _inserted_timestamp + FROM + +{% if is_incremental() %} +{{ ref('bronze_testnet__traces') }} +WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), 
'1900-01-01') _inserted_timestamp + FROM + {{ this }} + ) AND DATA :result IS NOT NULL +{% else %} + {{ ref('bronze_testnet__traces_fr') }} +WHERE DATA :result IS NOT NULL +{% endif %} + +qualify(ROW_NUMBER() over (PARTITION BY block_number, tx_position +ORDER BY + _inserted_timestamp DESC)) = 1 +), +flatten_traces AS ( + SELECT + block_number, + tx_position, + partition_key, + IFF( + path IN ( + 'result', + 'result.value', + 'result.type', + 'result.to', + 'result.input', + 'result.gasUsed', + 'result.gas', + 'result.from', + 'result.output', + 'result.error', + 'result.revertReason', + 'result.time', + 'gasUsed', + 'gas', + 'type', + 'to', + 'from', + 'value', + 'input', + 'error', + 'output', + 'time', + 'revertReason' + ), + 'ORIGIN', + REGEXP_REPLACE(REGEXP_REPLACE(path, '[^0-9]+', '_'), '^_|_$', '') + ) AS trace_address, + _inserted_timestamp, + OBJECT_AGG( + key, + VALUE + ) AS trace_json, + CASE + WHEN trace_address = 'ORIGIN' THEN NULL + WHEN POSITION( + '_' IN trace_address + ) = 0 THEN 'ORIGIN' + ELSE REGEXP_REPLACE( + trace_address, + '_[0-9]+$', + '', + 1, + 1 + ) + END AS parent_trace_address, + SPLIT( + trace_address, + '_' + ) AS trace_address_array + FROM + bronze_traces txs, + TABLE( + FLATTEN( + input => PARSE_JSON( + txs.full_traces + ), + recursive => TRUE + ) + ) f + WHERE + f.index IS NULL + AND f.key != 'calls' + AND f.path != 'result' + GROUP BY + block_number, + tx_position, + partition_key, + trace_address, + _inserted_timestamp +) +SELECT + block_number, + tx_position, + trace_address, + parent_trace_address, + trace_address_array, + trace_json, + partition_key, + _inserted_timestamp, + {{ dbt_utils.generate_surrogate_key( + ['block_number'] + + ['tx_position'] + + ['trace_address'] + ) }} AS traces_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + flatten_traces qualify(ROW_NUMBER() over(PARTITION BY traces_id +ORDER BY + _inserted_timestamp DESC)) = 1 \ No 
newline at end of file diff --git a/models/testnet/core/silver/silver_testnet__transactions.sql b/models/testnet/core/silver/silver_testnet__transactions.sql new file mode 100644 index 0000000..19f7ed9 --- /dev/null +++ b/models/testnet/core/silver/silver_testnet__transactions.sql @@ -0,0 +1,44 @@ +-- depends_on: {{ ref('bronze_testnet__transactions') }} +{{ config ( + materialized = "incremental", + incremental_strategy = 'delete+insert', + unique_key = "block_number", + cluster_by = ['modified_timestamp::DATE','partition_key'], + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", + tags = ['silver_testnet'] +) }} + +WITH bronze_transactions AS ( + SELECT + block_number, + partition_key, + VALUE :array_index :: INT AS tx_position, + DATA AS transaction_json, + _inserted_timestamp + FROM + {% if is_incremental() %} + {{ ref('bronze_testnet__transactions') }} + WHERE _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1900-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM {{ this }} + ) AND DATA IS NOT NULL + {% else %} + {{ ref('bronze_testnet__transactions_fr') }} + WHERE DATA IS NOT NULL + {% endif %} +) + +SELECT + block_number, + partition_key, + tx_position, + transaction_json, + _inserted_timestamp, + {{ dbt_utils.generate_surrogate_key(['block_number','tx_position']) }} AS transactions_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM bronze_transactions +where tx_position is not null +QUALIFY ROW_NUMBER() OVER (PARTITION BY transactions_id ORDER BY _inserted_timestamp DESC) = 1 \ No newline at end of file diff --git a/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_full.sql b/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_full.sql new file mode 100644 index 0000000..1338241 --- /dev/null +++ b/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_full.sql @@ -0,0 +1,9 @@ +{{ 
config ( + materialized = "view", + tags = ['full_test'] +) }} + +SELECT + * +FROM + {{ ref('silver_testnet__blocks') }} diff --git a/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_full.yml b/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_full.yml new file mode 100644 index 0000000..20bfc6d --- /dev/null +++ b/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_full.yml @@ -0,0 +1,51 @@ +version: 2 +models: + - name: test_silver_testnet__blocks_full + description: "This is a view used to test all of the silver blocks model." + tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - BLOCK_NUMBER + - fsc_utils.sequence_gaps: + column_name: BLOCK_NUMBER + config: + severity: error + error_if: ">10" + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - name: BLOCK_JSON + tests: + - not_null + - name: _INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff --git a/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_recent.sql b/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_recent.sql new file mode 100644 index 0000000..65123e3 --- /dev/null +++ 
b/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_recent.sql @@ -0,0 +1,16 @@ +{{ config ( + materialized = "view", + tags = ['recent_test'] +) }} + +SELECT + * +FROM + {{ ref('silver_testnet__blocks') }} +WHERE + block_number > ( + SELECT + block_number + FROM + {{ ref('_testnet_block_lookback') }} + ) diff --git a/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_recent.yml b/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_recent.yml new file mode 100644 index 0000000..b2d4454 --- /dev/null +++ b/models/testnet/core/silver/tests/blocks/test_silver_testnet__blocks_recent.yml @@ -0,0 +1,51 @@ +version: 2 +models: + - name: test_silver_testnet__blocks_recent + description: "This is a view used to test the last three days of blocks." + tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - BLOCK_NUMBER + - fsc_utils.sequence_gaps: + column_name: BLOCK_NUMBER + config: + severity: error + error_if: ">10" + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - name: BLOCK_JSON + tests: + - not_null + - name: _INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff --git 
a/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_full.sql b/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_full.sql new file mode 100644 index 0000000..8086b59 --- /dev/null +++ b/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_full.sql @@ -0,0 +1,9 @@ +{{ config ( + materialized = "view", + tags = ['full_test'] +) }} + +SELECT + * +FROM + {{ ref('silver_testnet__receipts') }} diff --git a/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_full.yml b/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_full.yml new file mode 100644 index 0000000..1c8a6ca --- /dev/null +++ b/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_full.yml @@ -0,0 +1,49 @@ +version: 2 +models: + - name: test_silver_testnet__receipts_full + description: "This is a view used to test all of the silver receipts model." + tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - RECEIPTS_ID + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - name: ARRAY_INDEX + tests: + - not_null + - name: RECEIPTS_JSON + tests: + - not_null + - name: _INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff 
--git a/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_recent.sql b/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_recent.sql new file mode 100644 index 0000000..4354b49 --- /dev/null +++ b/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_recent.sql @@ -0,0 +1,16 @@ +{{ config ( + materialized = "view", + tags = ['recent_test'] +) }} + +SELECT + * +FROM + {{ ref('silver_testnet__receipts') }} +WHERE + block_number > ( + SELECT + block_number + FROM + {{ ref('_testnet_block_lookback') }} + ) diff --git a/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_recent.yml b/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_recent.yml new file mode 100644 index 0000000..35b34bb --- /dev/null +++ b/models/testnet/core/silver/tests/receipts/test_silver_testnet__receipts_recent.yml @@ -0,0 +1,49 @@ +version: 2 +models: + - name: test_silver_testnet__receipts_recent + description: "This is a view used to test the last three days of receipts." 
+ tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - RECEIPTS_ID + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - name: ARRAY_INDEX + tests: + - not_null + - name: RECEIPTS_JSON + tests: + - not_null + - name: _INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff --git a/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_full.sql b/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_full.sql new file mode 100644 index 0000000..4080042 --- /dev/null +++ b/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_full.sql @@ -0,0 +1,9 @@ +{{ config ( + materialized = "view", + tags = ['full_test'] +) }} + +SELECT + * +FROM + {{ ref('silver_testnet__traces') }} diff --git a/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_full.yml b/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_full.yml new file mode 100644 index 0000000..24e2740 --- /dev/null +++ b/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_full.yml @@ -0,0 +1,52 @@ +version: 2 +models: + - name: test_silver_testnet__traces_full + description: "This is a view used to test all of the silver traces model." 
+ tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - TRACES_ID + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - name: TX_POSITION + tests: + - not_null + - name: TRACE_ADDRESS + tests: + - not_null + - name: TRACE_JSON + tests: + - not_null + - name: _INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff --git a/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_recent.sql b/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_recent.sql new file mode 100644 index 0000000..b5253b2 --- /dev/null +++ b/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_recent.sql @@ -0,0 +1,16 @@ +{{ config ( + materialized = "view", + tags = ['recent_test'] +) }} + +SELECT + * +FROM + {{ ref('silver_testnet__traces') }} +WHERE + block_number > ( + SELECT + block_number + FROM + {{ ref('_testnet_block_lookback') }} + ) diff --git a/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_recent.yml b/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_recent.yml new file mode 100644 index 0000000..c743ee8 --- /dev/null +++ b/models/testnet/core/silver/tests/traces/test_silver_testnet__traces_recent.yml @@ -0,0 +1,52 @@ +version: 2 
+models: + - name: test_silver_testnet__traces_recent + description: "This is a view used to test the last three days of traces." + tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - TRACES_ID + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - name: TX_POSITION + tests: + - not_null + - name: TRACE_ADDRESS + tests: + - not_null + - name: TRACE_JSON + tests: + - not_null + - name: _INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff --git a/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_full.sql b/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_full.sql new file mode 100644 index 0000000..4eeafb7 --- /dev/null +++ b/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_full.sql @@ -0,0 +1,9 @@ +{{ config ( + materialized = "view", + tags = ['full_test'] +) }} + +SELECT + * +FROM + {{ ref('silver_testnet__transactions') }} \ No newline at end of file diff --git a/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_full.yml b/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_full.yml new file mode 100644 index 0000000..e76bb11 --- /dev/null +++ 
b/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_full.yml @@ -0,0 +1,53 @@ +version: 2 +models: + - name: test_silver_testnet__transactions_full + description: "This is a view used to test all of the silver transactions model." + tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - TRANSACTIONS_ID + - fsc_utils.sequence_gaps: + partition_by: + - BLOCK_NUMBER + column_name: TX_POSITION + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - name: TX_POSITION + tests: + - not_null + - name: TRANSACTION_JSON + tests: + - not_null + - name: _INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff --git a/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_recent.sql b/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_recent.sql new file mode 100644 index 0000000..4a8471c --- /dev/null +++ b/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_recent.sql @@ -0,0 +1,16 @@ +{{ config ( + materialized = "view", + tags = ['recent_test'] +) }} + +SELECT + * +FROM + {{ ref('silver_testnet__transactions') }} +WHERE + block_number > ( + SELECT + block_number + FROM + {{ 
ref('_testnet_block_lookback') }} + ) diff --git a/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_recent.yml b/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_recent.yml new file mode 100644 index 0000000..577d282 --- /dev/null +++ b/models/testnet/core/silver/tests/transactions/test_silver_testnet__transactions_recent.yml @@ -0,0 +1,53 @@ +version: 2 +models: + - name: test_silver_testnet__transactions_recent + description: "This is a view used to test the last three days of transactions." + tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: + - TRANSACTIONS_ID + - fsc_utils.sequence_gaps: + partition_by: + - BLOCK_NUMBER + column_name: TX_POSITION + + columns: + - name: BLOCK_NUMBER + tests: + - not_null + - name: TX_POSITION + tests: + - not_null + - name: TRANSACTION_JSON + tests: + - not_null + - name: _INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: INSERTED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ + - name: MODIFIED_TIMESTAMP + tests: + - not_null + - dbt_expectations.expect_row_values_to_have_recent_data: + datepart: hour + interval: 2 + - dbt_expectations.expect_column_values_to_be_in_type_list: + column_type_list: + - TIMESTAMP_LTZ + - TIMESTAMP_NTZ \ No newline at end of file diff --git a/models/testnet/core/streamline/complete/streamline__testnet_blocks_complete.sql b/models/testnet/core/streamline/complete/streamline__testnet_blocks_complete.sql new file mode 100644 index 0000000..35c1fe4 --- /dev/null +++ 
b/models/testnet/core/streamline/complete/streamline__testnet_blocks_complete.sql @@ -0,0 +1,33 @@ +-- depends_on: {{ ref('bronze_testnet__blocks') }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", + tags = ['streamline_testnet_complete'] +) }} + +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_blocks_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze_testnet__blocks') }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze_testnet__blocks_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 \ No newline at end of file diff --git a/models/testnet/core/streamline/complete/streamline__testnet_receipts_complete.sql b/models/testnet/core/streamline/complete/streamline__testnet_receipts_complete.sql new file mode 100644 index 0000000..1e12d80 --- /dev/null +++ b/models/testnet/core/streamline/complete/streamline__testnet_receipts_complete.sql @@ -0,0 +1,33 @@ +-- depends_on: {{ ref('bronze_testnet__receipts') }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", + tags = ['streamline_testnet_complete'] +) }} + +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_receipts_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS 
_invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze_testnet__receipts') }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze_testnet__receipts_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 \ No newline at end of file diff --git a/models/testnet/core/streamline/complete/streamline__testnet_traces_complete.sql b/models/testnet/core/streamline/complete/streamline__testnet_traces_complete.sql new file mode 100644 index 0000000..020961f --- /dev/null +++ b/models/testnet/core/streamline/complete/streamline__testnet_traces_complete.sql @@ -0,0 +1,33 @@ +-- depends_on: {{ ref('bronze_testnet__traces') }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", + tags = ['streamline_testnet_complete'] +) }} + +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_traces_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze_testnet__traces') }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze_testnet__traces_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 \ No newline at end of file diff --git a/models/testnet/core/streamline/complete/streamline__testnet_transactions_complete.sql b/models/testnet/core/streamline/complete/streamline__testnet_transactions_complete.sql new file mode 100644 index 0000000..265502a --- 
/dev/null +++ b/models/testnet/core/streamline/complete/streamline__testnet_transactions_complete.sql @@ -0,0 +1,33 @@ +-- depends_on: {{ ref('bronze_testnet__transactions') }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", + tags = ['streamline_testnet_complete'] +) }} + +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_transactions_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze_testnet__transactions') }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze_testnet__transactions_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 \ No newline at end of file diff --git a/models/testnet/core/streamline/history/streamline__testnet_blocks_transactions_history.sql b/models/testnet/core/streamline/history/streamline__testnet_blocks_transactions_history.sql new file mode 100644 index 0000000..bed3b32 --- /dev/null +++ b/models/testnet/core/streamline/history/streamline__testnet_blocks_transactions_history.sql @@ -0,0 +1,56 @@ +{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %} + +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params ={ "external_table" :"testnet_blocks_transactions", + "sql_limit" :"2000000", + "producer_batch_size" :"7200", + "worker_batch_size" :"1800", + "sql_source" :"{{this.identifier}}", + "async_concurrent_requests" :"1", + "exploded_key": 
tojson(["result", "result.transactions"]) } + ), + tags = ['streamline_testnet_history'] +) }} + +WITH to_do AS ( + SELECT block_number + FROM {{ ref("streamline__testnet_blocks") }} + EXCEPT + SELECT block_number + FROM {{ ref("streamline__testnet_blocks_complete") }} b + INNER JOIN {{ ref("streamline__testnet_transactions_complete") }} t USING(block_number) +), +ready_blocks AS ( + SELECT block_number + FROM to_do + where block_number < (select block_number from {{ ref("_testnet_block_lookback") }}) +) +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{Service}/{Authentication}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', 'streamline' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', 'eth_getBlockByNumber', + 'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE) + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +ORDER BY block_number desc + +LIMIT + 2000000 \ No newline at end of file diff --git a/models/testnet/core/streamline/history/streamline__testnet_receipts_history.sql b/models/testnet/core/streamline/history/streamline__testnet_receipts_history.sql new file mode 100644 index 0000000..2b79794 --- /dev/null +++ b/models/testnet/core/streamline/history/streamline__testnet_receipts_history.sql @@ -0,0 +1,55 @@ +{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %} + +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params ={ "external_table" :"testnet_receipts", + "sql_limit" :"2000000", + "producer_batch_size" :"7200", + "worker_batch_size" :"1800", + "sql_source" :"{{this.identifier}}", + "async_concurrent_requests" :"1", + "exploded_key": tojson(["result"]) } + ), + tags = ['streamline_testnet_history'] +) }} + +WITH to_do AS ( + SELECT block_number + FROM {{ 
ref("streamline__testnet_blocks") }} + EXCEPT + SELECT block_number + FROM {{ ref("streamline__testnet_receipts_complete") }} +), +ready_blocks AS ( + SELECT block_number + FROM to_do + where block_number < (select block_number from {{ ref("_testnet_block_lookback") }}) +) +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{Service}/{Authentication}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', 'streamline' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', 'eth_getBlockReceipts', + 'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number)) + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +ORDER BY block_number desc + +LIMIT + 2000000 \ No newline at end of file diff --git a/models/testnet/core/streamline/history/streamline__testnet_traces_history.sql b/models/testnet/core/streamline/history/streamline__testnet_traces_history.sql new file mode 100644 index 0000000..da8cfb1 --- /dev/null +++ b/models/testnet/core/streamline/history/streamline__testnet_traces_history.sql @@ -0,0 +1,55 @@ +{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %} + +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params ={ "external_table" :"testnet_traces", + "sql_limit" :"2000000", + "producer_batch_size" :"7200", + "worker_batch_size" :"1800", + "sql_source" :"{{this.identifier}}", + "async_concurrent_requests" :"1", + "exploded_key": tojson(["result"]) } + ), + tags = ['streamline_testnet_history'] +) }} + +WITH to_do AS ( + SELECT block_number + FROM {{ ref("streamline__testnet_blocks") }} + EXCEPT + SELECT block_number + FROM {{ ref("streamline__testnet_traces_complete") }} +), +ready_blocks AS ( + SELECT block_number + FROM to_do + where block_number < (select block_number from {{ ref("_testnet_block_lookback") 
}}) +) +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{Service}/{Authentication}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', 'streamline' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', 'debug_traceBlockByNumber', + 'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s')) + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +ORDER BY block_number desc + +LIMIT + 2000000 \ No newline at end of file diff --git a/models/testnet/core/streamline/realtime/streamline__testnet_blocks_transactions_realtime.sql b/models/testnet/core/streamline/realtime/streamline__testnet_blocks_transactions_realtime.sql new file mode 100644 index 0000000..8d7d984 --- /dev/null +++ b/models/testnet/core/streamline/realtime/streamline__testnet_blocks_transactions_realtime.sql @@ -0,0 +1,54 @@ +{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %} + +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params ={ "external_table" :"testnet_blocks_transactions", + "sql_limit" :"7200", + "producer_batch_size" :"3600", + "worker_batch_size" :"1800", + "sql_source" :"{{this.identifier}}", + "exploded_key": tojson(["result", "result.transactions"]) } + ), + tags = ['streamline_testnet_realtime'] +) }} + +WITH to_do AS ( + SELECT block_number + FROM {{ ref("streamline__testnet_blocks") }} + EXCEPT + SELECT block_number + FROM {{ ref("streamline__testnet_blocks_complete") }} b + INNER JOIN {{ ref("streamline__testnet_transactions_complete") }} t USING(block_number) +), +ready_blocks AS ( + SELECT block_number + FROM to_do +) +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{Service}/{Authentication}', + OBJECT_CONSTRUCT( 
+ 'Content-Type', 'application/json', + 'fsc-quantum-state', 'streamline' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', 'eth_getBlockByNumber', + 'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE) + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +ORDER BY block_number desc + +LIMIT + 7200 \ No newline at end of file diff --git a/models/testnet/core/streamline/realtime/streamline__testnet_receipts_realtime.sql b/models/testnet/core/streamline/realtime/streamline__testnet_receipts_realtime.sql new file mode 100644 index 0000000..227d581 --- /dev/null +++ b/models/testnet/core/streamline/realtime/streamline__testnet_receipts_realtime.sql @@ -0,0 +1,53 @@ +{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %} + +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params ={ "external_table" :"testnet_receipts", + "sql_limit" :"7200", + "producer_batch_size" :"1800", + "worker_batch_size" :"1800", + "sql_source" :"{{this.identifier}}", + "exploded_key": tojson(["result"]) } + ), + tags = ['streamline_testnet_realtime'] +) }} + +WITH to_do AS ( + SELECT block_number + FROM {{ ref("streamline__testnet_blocks") }} + EXCEPT + SELECT block_number + FROM {{ ref("streamline__testnet_receipts_complete") }} +), +ready_blocks AS ( + SELECT block_number + FROM to_do +) +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{Service}/{Authentication}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', 'streamline' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', 'eth_getBlockReceipts', + 'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number)) + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +ORDER BY block_number desc + +LIMIT + 7200 \ No newline at 
end of file diff --git a/models/testnet/core/streamline/realtime/streamline__testnet_traces_realtime.sql b/models/testnet/core/streamline/realtime/streamline__testnet_traces_realtime.sql new file mode 100644 index 0000000..8b05706 --- /dev/null +++ b/models/testnet/core/streamline/realtime/streamline__testnet_traces_realtime.sql @@ -0,0 +1,53 @@ +{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %} + +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params ={ "external_table" :"testnet_traces", + "sql_limit" :"7200", + "producer_batch_size" :"1800", + "worker_batch_size" :"1800", + "sql_source" :"{{this.identifier}}", + "exploded_key": tojson(["result"]) } + ), + tags = ['streamline_testnet_realtime'] +) }} + +WITH to_do AS ( + SELECT block_number + FROM {{ ref("streamline__testnet_blocks") }} + EXCEPT + SELECT block_number + FROM {{ ref("streamline__testnet_traces_complete") }} +), +ready_blocks AS ( + SELECT block_number + FROM to_do +) +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{Service}/{Authentication}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', 'streamline' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', 'debug_traceBlockByNumber', + 'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s')) + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +ORDER BY block_number desc + +LIMIT + 7200 \ No newline at end of file diff --git a/models/testnet/core/streamline/streamline__get_testnet_chainhead.sql b/models/testnet/core/streamline/streamline__get_testnet_chainhead.sql new file mode 100644 index 0000000..7ce2e82 --- /dev/null +++ b/models/testnet/core/streamline/streamline__get_testnet_chainhead.sql @@ -0,0 +1,28 @@ +{{ 
config ( + materialized = 'table', + tags = ['streamline_testnet_complete','chainhead'] +) }} + +SELECT + live.udf_api( + 'POST', + '{Service}/{Authentication}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', 'LiveQuery' + ), + OBJECT_CONSTRUCT( + 'id', + 0, + 'jsonrpc', + '2.0', + 'method', + 'eth_blockNumber', + 'params', + [] + ), + '{{ var('GLOBAL_NODE_SECRET_PATH') }}' + ) AS resp, + utils.udf_hex_to_int( + resp :data :result :: STRING + ) AS block_number \ No newline at end of file diff --git a/models/testnet/core/streamline/streamline__get_testnet_chainhead.yml b/models/testnet/core/streamline/streamline__get_testnet_chainhead.yml new file mode 100644 index 0000000..c2a8418 --- /dev/null +++ b/models/testnet/core/streamline/streamline__get_testnet_chainhead.yml @@ -0,0 +1,9 @@ +version: 2 +models: + - name: streamline__get_testnet_chainhead + description: "Retrieves the current testnet chainhead block number via the eth_blockNumber JSON-RPC call and decodes the hex response to an integer." + + columns: + - name: BLOCK_NUMBER + tests: + - not_null \ No newline at end of file diff --git a/models/testnet/core/streamline/streamline__testnet_blocks.sql b/models/testnet/core/streamline/streamline__testnet_blocks.sql new file mode 100644 index 0000000..d5e1ea5 --- /dev/null +++ b/models/testnet/core/streamline/streamline__testnet_blocks.sql @@ -0,0 +1,28 @@ +{{ config ( + materialized = "view", + tags = ['streamline_testnet_complete'] +) }} + +select * +from ( +SELECT + _id, + ( + ({{ var('GLOBAL_BLOCKS_PER_HOUR',0) }} / 60) * {{ var('GLOBAL_CHAINHEAD_DELAY',3) }} + ) :: INT AS block_number_delay, --minute-based block delay + (_id - block_number_delay) :: INT AS block_number, + utils.udf_int_to_hex(block_number) AS block_number_hex +FROM + {{ ref('utils__number_sequence') }} +WHERE + _id <= ( + SELECT + COALESCE( + block_number, + 0 + ) + FROM + {{ ref("streamline__get_testnet_chainhead") }} + ) +) +where block_number > 0 \ No newline at end of file diff --git
a/models/testnet/utils/_testnet_block_lookback.sql b/models/testnet/utils/_testnet_block_lookback.sql new file mode 100644 index 0000000..08074b8 --- /dev/null +++ b/models/testnet/utils/_testnet_block_lookback.sql @@ -0,0 +1,11 @@ +{{ config( + materialized = 'ephemeral' +) }} + +SELECT + COALESCE(MIN(block_number), 0) AS block_number +FROM + {{ ref("testnet__fact_blocks") }} +WHERE + block_timestamp >= DATEADD('hour', -72, TRUNCATE(SYSDATE(), 'HOUR')) + AND block_timestamp < DATEADD('hour', -71, TRUNCATE(SYSDATE(), 'HOUR')) \ No newline at end of file diff --git a/models/testnet/utils/utils__number_sequence.sql b/models/testnet/utils/utils__number_sequence.sql new file mode 100644 index 0000000..f98859d --- /dev/null +++ b/models/testnet/utils/utils__number_sequence.sql @@ -0,0 +1,19 @@ +{{ config( + materialized = 'incremental', + cluster_by = 'round(_id,-3)', + post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(_id)", + full_refresh = false, + tags = ['utils'] +) }} + +SELECT + ROW_NUMBER() over ( + ORDER BY + SEQ4() + ) - 1 :: INT AS _id +FROM + TABLE(GENERATOR(rowcount => 1000000000)) +WHERE 1=1 +{% if is_incremental() %} + AND 1=0 +{% endif %} \ No newline at end of file diff --git a/packages.yml b/packages.yml new file mode 100644 index 0000000..4d95580 --- /dev/null +++ b/packages.yml @@ -0,0 +1,3 @@ +packages: + - git: https://github.com/FlipsideCrypto/fsc-evm.git + revision: v3.21.1 \ No newline at end of file diff --git a/profiles.yml b/profiles.yml new file mode 100644 index 0000000..3e428a7 --- /dev/null +++ b/profiles.yml @@ -0,0 +1,31 @@ +rise: + target: prod + outputs: + dev: + type: snowflake + account: "{{ env_var('ACCOUNT') }}" + role: "{{ env_var('ROLE') }}" + user: "{{ env_var('USER') }}" + password: "{{ env_var('PASSWORD') }}" + region: "{{ env_var('REGION') }}" + database: "{{ env_var('DATABASE') }}" + warehouse: "{{ env_var('WAREHOUSE') }}" + schema: SILVER + threads: 4 + client_session_keep_alive: False + query_tag: 
curator + prod: + type: snowflake + account: "{{ env_var('ACCOUNT') }}" + role: "{{ env_var('ROLE') }}" + user: "{{ env_var('USER') }}" + password: "{{ env_var('PASSWORD') }}" + region: "{{ env_var('REGION') }}" + database: "{{ env_var('DATABASE') }}" + warehouse: "{{ env_var('WAREHOUSE') }}" + schema: SILVER + threads: 4 + client_session_keep_alive: False + query_tag: curator + config: + send_anonymous_usage_stats: False \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..39b82bb --- /dev/null +++ b/requirements.txt @@ -0,0 +1,2 @@ +dbt-snowflake>=1.7,<1.8 +protobuf==4.25.3 \ No newline at end of file diff --git a/selectors.yml b/selectors.yml new file mode 100644 index 0000000..f43565e --- /dev/null +++ b/selectors.yml @@ -0,0 +1,7 @@ +selectors: + - name: integration_tests + description: "Selector for integration tests" + definition: + union: + - method: fqn + value: "livequery_models.deploy.core._utils" \ No newline at end of file diff --git a/snapshots/.gitkeep b/snapshots/.gitkeep new file mode 100644 index 0000000..e69de29