test/fsc-evm-v4-base (#266)

* temp changes for v4

* docs, disabled models and dummy receipts by hash models

* removed unused vars

* workflows

* sources

* removed comment for successful compile

* package

* test project vars

* revert

* clean up

* update dbt project, workflows etc.

* update tags for new naming conventions

* blank model for overflow

* makefile

* makefile updates

* misc updates for abis

* temp source and array index

* add scores job

* sources update

* refs

* package

* updates

* package

* package

---------

Co-authored-by: Austin <austin@flipsidecrypto.com>
drethereum 2025-04-22 10:55:58 -06:00 committed by GitHub
parent d4eadebe2b
commit c9d0177f47
GPG Key ID: B5690EEEBB952194
639 changed files with 914 additions and 21759 deletions

View File

@ -0,0 +1,28 @@
name: dbt_alter_all_gha_tasks
run-name: dbt_alter_all_gha_tasks
on:
workflow_dispatch:
branches:
- "main"
inputs:
task_action:
type: choice
description: Action to perform on all tasks
required: true
options:
- RESUME
- SUSPEND
default: RESUME
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_all_gha_tasks.yml@pre-release/v4-beta
with:
task_action: |
${{ inputs.task_action }}
target: prod
secrets: inherit
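This new workflow only runs on manual dispatch, so it has to be triggered from the Actions tab or from the command line. A minimal sketch using the GitHub CLI (the repository name is an assumption, not part of this diff):

# Suspend all scheduled GHA tasks for this project (repo name is hypothetical)
gh workflow run dbt_alter_all_gha_tasks.yml --repo FlipsideCrypto/base-models -f task_action=SUSPEND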

View File

@ -19,30 +19,16 @@ on:
- RESUME
default: SUSPEND
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_gha_task.yml@pre-release/v4-beta
with:
workflow_name: |
${{ inputs.workflow_name }}
task_action: |
${{ inputs.task_action }}
environment: workflow_prod
target: prod
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_deploy_new_workflows
run-name: dbt_deploy_new_workflows
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Deploy New Github Actions
command: |
make deploy_new_github_action DBT_TARGET=prod
secrets: inherit

View File

@ -5,70 +5,10 @@ on:
branches:
- "main"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: refresh ddl for datashare
run: |
cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
- name: checkout docs branch
run: |
git checkout -B docs origin/main
- name: generate dbt docs
run: dbt docs generate -t prod
- name: move files to docs directory
run: |
mkdir -p ./docs
cp target/{catalog.json,manifest.json,index.html} docs/
- name: clean up target directory
run: dbt clean
- name: check for changes
run: git status
- name: stage changed files
run: git add .
- name: commit changed files
run: |
git config user.email "abc@xyz"
git config user.name "github-actions"
git commit -am "Auto-update docs"
- name: push changes to docs
run: |
git push -f --set-upstream origin docs
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_docs_update.yml@pre-release/v4-beta
secrets: inherit

View File

@ -3,32 +3,15 @@ run-name: ${{ github.event.inputs.branch }}
on:
workflow_dispatch:
inputs:
environment:
required: true
type: string
branches:
- "main"
concurrency: ${{ github.workflow }}
jobs:
prepare_vars:
runs-on: ubuntu-latest
environment:
name: ${{ inputs.environment }}
outputs:
warehouse: ${{ steps.set_outputs.outputs.warehouse }}
steps:
- name: Set warehouse output
id: set_outputs
run: |
echo "warehouse=${{ vars.WAREHOUSE }}" >> $GITHUB_OUTPUT
called_workflow_template:
needs: prepare_vars
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_integration_test.yml@pre-release/v4-beta
with:
command: >
command: |
dbt test --selector 'integration_tests'
environment: ${{ inputs.environment }}
warehouse: ${{ needs.prepare_vars.outputs.warehouse }}
secrets: inherit

View File

@ -1,51 +0,0 @@
name: dbt_run_abi_refresh
run-name: dbt_run_abi_refresh
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Update ABI models
run: |
dbt run -m "base_models,tag:abis"
- name: Kick off decoded logs history, if there are new ABIs from users
run: |
dbt run-operation run_decoded_logs_history

View File

@ -1,12 +1,12 @@
name: dbt_run_adhoc
run-name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
target:
type: choice
description: DBT Run Environment
required: true
@ -26,42 +26,18 @@ on:
default: DBT
dbt_command:
type: string
description: "DBT Run Command"
description: 'DBT Run Command'
required: true
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
warehouse: ${{ inputs.warehouse }}
target: ${{ inputs.target }}
command_name: Run DBT Command
command: ${{ inputs.dbt_command }}
secrets: inherit

View File

@ -1,69 +0,0 @@
name: dbt_run_deployment
run-name: dbt_run_deployment
on:
workflow_dispatch:
branches:
- "main"
inputs:
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: string
description: 'DBT Run Command'
required: true
env:
DBT_PROFILES_DIR: ./
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
dbt:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
- name: Run datashare model
run: |
cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
dbt run-operation run_query --args "{sql: call admin.datashare.sp_grant_share_permissions('${{ env.DATABASE }}')}"
- name: Store logs
uses: actions/upload-artifact@v3
with:
name: dbt-logs
path: |
logs
target

View File

@ -3,45 +3,13 @@ run-name: dbt_run_dev_refresh
on:
workflow_dispatch:
schedule:
# Runs "at 2:10 UTC on Monday" (see https://crontab.guru)
- cron: '10 2 * * 1'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run-operation run_sp_create_prod_clone
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_dev_refresh.yml@pre-release/v4-beta
secrets: inherit

View File

@ -3,48 +3,20 @@ run-name: dbt_run_full_observability
on:
workflow_dispatch:
schedule:
# Runs “At 06:00 on day-of-month 1.” (see https://crontab.guru)
- cron: '0 18 1 * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod_2xl
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "base_models,tag:observability"
command_name: Run Observability Models
command: |
dbt run --threads 2 --vars '{"MAIN_OBSERV_FULL_TEST_ENABLED":True}' -m "fsc_evm,tag:observability"
secrets: inherit

View File

@ -3,45 +3,17 @@ run-name: dbt_run_heal_models
on:
workflow_dispatch:
schedule:
# Runs at 04:00 on Wednesday (see https://crontab.guru)
- cron: '0 4 * * 3'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "base_models,tag:heal" --vars '{"HEAL_MODEL":True}'
command_name: Run Heal Models
command: |
dbt run -m "$PROJECT_NAME,tag:heal" --vars '{"HEAL_MODEL":True}'
secrets: inherit

View File

@ -1,54 +0,0 @@
name: dbt_run_operation_reorg
run-name: dbt_run_operation_reorg
on:
workflow_dispatch:
schedule:
# Runs at minute 45 every Sunday (see https://crontab.guru)
- cron: '45 0 * * 0'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: List reorg models
id: list_models
run: |
reorg_model_list=$(dbt list --select "base_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT
- name: Execute block_reorg macro
run: |
dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '169'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log

View File

@ -1,44 +0,0 @@
name: dbt_run_overflowed_traces
run-name: dbt_run_overflowed_traces
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "base_models,tag:overflowed_traces" --vars '{"OVERFLOWED_TRACES":True}'

View File

@ -0,0 +1,22 @@
name: dbt_run_scheduled_abis
run-name: dbt_run_scheduled_abis
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run ABI Models
command: |
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:gold,tag:abis"
command_name_2: Kick off decoded logs history, if there are new user submitted ABIs
command_2: |
dbt run-operation fsc_evm.run_decoded_logs_history
secrets: inherit

View File

@ -6,41 +6,14 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "base_models,tag:curated"
command_name: Run Curated Models
command: |
dbt run -m "$PROJECT_NAME,tag:curated" "fsc_evm,tag:curated"
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_run_scheduled_decoder
run-name: dbt_run_scheduled_decoder
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Decoder Models
command: |
dbt run -m "fsc_evm,tag:bronze,tag:decoded_logs" "fsc_evm,tag:silver,tag:decoded_logs" "fsc_evm,tag:gold,tag:decoded_logs"
secrets: inherit

View File

@ -0,0 +1,22 @@
name: dbt_run_scheduled_main
run-name: dbt_run_scheduled_main
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Main Models
command: |
dbt run -m "fsc_evm,tag:silver,tag:core" "fsc_evm,tag:gold,tag:core" "fsc_evm,tag:silver,tag:prices" "fsc_evm,tag:gold,tag:prices" "fsc_evm,tag:silver,tag:labels" "fsc_evm,tag:gold,tag:labels" "fsc_evm,tag:gold,tag:nft"
command_name_2: Run Streamline Models
command_2: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete"
secrets: inherit

View File

@ -1,46 +0,0 @@
name: dbt_run_scheduled_non_realtime
run-name: dbt_run_scheduled_non_realtime
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "base_models,tag:non_realtime" "base_models,tag:streamline_decoded_logs_complete" "base_models,tag:streamline_decoded_logs_realtime"

View File

@ -0,0 +1,20 @@
name: dbt_run_scheduled_scores
run-name: dbt_run_scheduled_scores
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Scores Models
command: |
dbt run -m "fsc_evm,tag:scores"
secrets: inherit

View File

@ -6,45 +6,17 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "base_models,tag:streamline_core_complete" "base_models,tag:streamline_core_realtime" "base_models,tag:streamline_core_complete_receipts" "base_models,tag:streamline_core_realtime_receipts" "base_models,tag:streamline_core_complete_confirm_blocks" "base_models,tag:streamline_core_realtime_confirm_blocks"
- name: Run Chainhead Tests
run: |
dbt test -m "base_models,tag:chainhead"
command_name: Run Chainhead Models
command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash"
command_name_2: Run Chainhead Tests
command_2: |
dbt test -m "fsc_evm,tag:chainhead"
secrets: inherit

View File

@ -1,49 +0,0 @@
name: dbt_run_streamline_decoded_logs_history
run-name: dbt_run_streamline_decoded_logs_history
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Update complete table
run: |
dbt run -m "base_models,tag:streamline_decoded_logs_complete"
- name: Decode historical logs
run: |
dbt run-operation decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'

View File

@ -1,46 +0,0 @@
name: dbt_run_streamline_decoder
run-name: dbt_run_streamline_decoder
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "base_models,tag:decoded_logs"

View File

@ -0,0 +1,22 @@
name: dbt_run_streamline_decoder_history
run-name: dbt_run_streamline_decoder_history
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Decoder Complete
command: |
dbt run -m "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete"
command_name_2: Run Streamline Decoder History
command_2: |
dbt run-operation fsc_evm.decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run History Models
command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:history" --exclude "fsc_evm,tag:receipts_by_hash"
secrets: inherit

View File

@ -1,72 +0,0 @@
name: dbt_run_streamline_history_adhoc
run-name: dbt_run_streamline_history_adhoc
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
type: choice
description: DBT Run Environment
required: true
options:
- dev
- prod
- prod_backfill
default: dev
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: choice
description: 'DBT Run Command'
required: true
options:
- dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "base_models,tag:streamline_core_complete" "base_models,tag:streamline_core_history" "base_models,tag:streamline_core_complete_receipts" "base_models,tag:streamline_core_history_receipts" "base_models,tag:streamline_core_complete_confirm_blocks" "base_models,tag:streamline_core_history_confirm_blocks"
- dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "base_models,tag:streamline_decoded_logs_complete" "base_models,tag:streamline_decoded_logs_history"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}

View File

@ -3,48 +3,21 @@ run-name: dbt_test_daily
on:
workflow_dispatch:
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: '0 9 * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test --exclude "base_models,tag:full_test" "base_models,tag:recent_test" "base_models,tag:base_goerli" "base_models,tag:gha_tasks" livequery_models
target: test
command_name: Build Daily Testing Views
command: |
dbt run -m "fsc_evm,tag:daily_test"
command_name_2: Run Daily Tests (all tests excluding full, recent and misc. others)
command_2: |
dbt test --exclude "fsc_evm,tag:full_test" "fsc_evm,tag:recent_test" "fsc_evm,tag:gha_tasks" livequery_models
secrets: inherit

View File

@ -3,49 +3,22 @@ run-name: dbt_test_intraday
on:
workflow_dispatch:
schedule:
# Runs “At minute 5 past every 4th hour.” (see https://crontab.guru)
- cron: '5 */4 * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "base_models,tag:observability"
dbt test -m "base_models,tag:recent_test"
target: test
command_name: Build Recent Testing Views
command: |
dbt run -m "fsc_evm,tag:recent_test"
command_name_2: Run Observability & Recent Tests
command_2: |
dbt run -m "fsc_evm,tag:observability"
dbt test -m "fsc_evm,tag:recent_test"
secrets: inherit

View File

@ -3,48 +3,21 @@ run-name: dbt_test_monthly
on:
workflow_dispatch:
schedule:
# Runs “At 7pm on the 28th of month.” (see https://crontab.guru)
- cron: '0 19 28 * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test -m "base_models,tag:full_test"
target: test
command_name: Build Full Testing Views
command: |
dbt run -m "fsc_evm,tag:full_test"
command_name_2: Run Full Tests
command_2: |
dbt test -m "fsc_evm,tag:full_test"
secrets: inherit

View File

@ -17,6 +17,8 @@ test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
docs-paths:
["dbt_packages/fsc_evm/doc_descriptions", "models/doc_descriptions", "models"]
target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
@ -49,11 +51,59 @@ query-comment:
# Full documentation: https://docs.getdbt.com/docs/configuring-models
models:
base_models: # replace with the name of the chain
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
livequery_base:
+materialized: ephemeral
fsc_evm:
+enabled: false # disable fsc_evm package by default
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
main_package:
+enabled: false # disable top level package by default, enabled subpackages as needed
admin:
+enabled: true
core:
+enabled: true # enable subpackages, as needed
bronze:
+enabled: false
token_reads:
+enabled: true
github_actions:
+enabled: true
labels:
+enabled: true
observability:
+enabled: true
prices:
+enabled: true
utils:
+enabled: true
decoder_package:
+enabled: false
abis:
+enabled: true
decoded_logs:
+enabled: false
gold:
+enabled: true
silver:
+enabled: true
streamline:
+enabled: true
curated_package:
+enabled: false
stats:
+enabled: true
scores_package:
+enabled: true
# In this example config, we tell dbt to build all models in the example/ directory
# as tables. These settings can be overridden in the individual model files
@ -62,19 +112,14 @@ models:
vars:
"dbt_date:time_zone": GMT
STREAMLINE_INVOKE_STREAMS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
UPDATE_UDFS_AND_SPS: False
UPDATE_SNOWFLAKE_TAGS: True
OBSERV_FULL_TEST: False
OVERFLOWED_TRACES: False
WAIT: 0
HEAL_MODEL: False
HEAL_MODELS: []
START_GHA_TASKS: False
API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
ROLES: |
["INTERNAL_DEV"]
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
#### STREAMLINE 2.0 BEGIN ####
@ -101,39 +146,3 @@ vars:
- DBT_CLOUD_BASE
#### STREAMLINE 2.0 END ####
#### FSC_EVM BEGIN ####
# Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables
### GLOBAL VARIABLES BEGIN ###
## REQUIRED
GLOBAL_PROD_DB_NAME: 'base'
GLOBAL_NODE_SECRET_PATH: 'Vault/prod/base/quicknode/base_mainnet'
GLOBAL_BLOCKS_PER_HOUR: 1800
GLOBAL_USES_STREAMLINE_V1: True
GLOBAL_USES_SINGLE_FLIGHT_METHOD: True
### GLOBAL VARIABLES END ###
### MAIN_PACKAGE VARIABLES BEGIN ###
### CORE ###
## REQUIRED
## OPTIONAL
TRACES_REALTIME_SQL_LIMIT: 3600
TRACES_REALTIME_PRODUCER_BATCH_SIZE: 900
TRACES_REALTIME_WORKER_BATCH_SIZE: 450
# GOLD_FULL_REFRESH: True
# SILVER_FULL_REFRESH: True
### MAIN_PACKAGE VARIABLES END ###
### DECODER_PACKAGE VARIABLES BEGIN ###
## REQUIRED
## OPTIONAL
### DECODER_PACKAGE VARIABLES END ###
#### FSC_EVM END ####
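Since the fsc_evm package is disabled by default and only selected subpackages are switched on, it can help to confirm which package models a target will actually build before wiring up the scheduled workflows. A minimal sketch with standard dbt selection syntax (the tag and target values are illustrative assumptions):

# List all enabled fsc_evm models for the dev target
dbt ls -m fsc_evm --target dev
# Narrow the listing to a single subpackage by tag
dbt ls -m "fsc_evm,tag:core" --target dev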

View File

@ -1,25 +1,9 @@
{% macro create_udfs() %}
{% if var("UPDATE_UDFS_AND_SPS") %}
{% if var("UPDATE_UDFS_AND_SPS", false) %}
{% set sql %}
CREATE schema if NOT EXISTS silver;
{{ create_udtf_get_base_table(
schema = "streamline"
) }}
{% endset %}
{% do run_query(sql) %}
{% if target.database != "BASE_COMMUNITY_DEV" %}
{% set sql %}
{{ create_udf_get_chainhead() }}
{{ create_udf_bulk_json_rpc() }}
{{ create_udf_bulk_get_traces() }}
{{ create_udf_decode_array_string() }}
{{ create_udf_decode_array_object() }}
{{ create_udf_bulk_decode_logs() }}
{% endset %}
{% do run_query(sql) %}
{% endif %}
{{- fsc_utils.create_udfs() -}}
{% endif %}
{% endmacro %}
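UDF creation is gated behind UPDATE_UDFS_AND_SPS, which defaults to False in dbt_project.yml, so an ordinary run creates nothing. A minimal sketch of forcing the gate open for a one-off invocation (calling the macro directly with run-operation is an assumption; it may instead be attached to an on-run-start hook):

# Create/refresh UDFs once, with the gate variable enabled (target is illustrative)
dbt run-operation create_udfs --vars '{"UPDATE_UDFS_AND_SPS": True}' --target dev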

View File

@ -1,22 +0,0 @@
{% macro standard_predicate(
input_column = 'block_number'
) -%}
{%- set database_name = target.database -%}
{%- set schema_name = generate_schema_name(
node = model
) -%}
{%- set table_name = generate_alias_name(
node = model
) -%}
{%- set tmp_table_name = table_name ~ '__dbt_tmp' -%}
{%- set full_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ table_name -%}
{%- set full_tmp_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ tmp_table_name -%}
{{ full_table_name }}.{{ input_column }} >= (
SELECT
MIN(
{{ input_column }}
)
FROM
{{ full_tmp_table_name }}
)
{%- endmacro %}

View File

@ -1,129 +0,0 @@
{% macro decoded_logs_history(backfill_mode=false) %}
{%- set params = {
"sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 8000000),
"producer_batch_size": var("DECODED_LOGS_HISTORY_PRODUCER_BATCH_SIZE", 400000),
"worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000)
} -%}
{% set wait_time = var("DECODED_LOGS_HISTORY_WAIT_TIME", 60) %}
{% set find_months_query %}
SELECT
DISTINCT date_trunc('month', block_timestamp)::date as month
FROM {{ ref('core__fact_blocks') }}
ORDER BY month ASC
{% endset %}
{% set results = run_query(find_months_query) %}
{% if execute %}
{% set months = results.columns[0].values() %}
{% for month in months %}
{% set view_name = 'decoded_logs_history_' ~ month.strftime('%Y_%m') %}
{% set create_view_query %}
create or replace view streamline.{{view_name}} as (
WITH target_blocks AS (
SELECT
block_number
FROM {{ ref('core__fact_blocks') }}
WHERE date_trunc('month', block_timestamp) = '{{month}}'::timestamp
),
new_abis AS (
SELECT
abi,
parent_contract_address,
event_signature,
start_block,
end_block
FROM {{ ref('silver__complete_event_abis') }}
{% if not backfill_mode %}
WHERE inserted_timestamp > dateadd('day', -30, sysdate())
{% endif %}
),
existing_logs_to_exclude AS (
SELECT _log_id
FROM {{ ref('streamline__decoded_logs_complete') }} l
INNER JOIN target_blocks b using (block_number)
),
candidate_logs AS (
SELECT
l.block_number,
l.tx_hash,
l.event_index,
l.contract_address,
l.topics,
l.data,
concat(l.tx_hash::string, '-', l.event_index::string) as _log_id
FROM target_blocks b
INNER JOIN {{ ref('core__fact_event_logs') }} l using (block_number)
WHERE l.tx_status = 'SUCCESS' and date_trunc('month', l.block_timestamp) = '{{month}}'::timestamp
)
SELECT
l.block_number,
l._log_id,
A.abi,
OBJECT_CONSTRUCT(
'topics', l.topics,
'data', l.data,
'address', l.contract_address
) AS data
FROM candidate_logs l
INNER JOIN new_abis A
ON A.parent_contract_address = l.contract_address
AND A.event_signature = l.topics[0]::STRING
AND l.block_number BETWEEN A.start_block AND A.end_block
WHERE NOT EXISTS (
SELECT 1
FROM existing_logs_to_exclude e
WHERE e._log_id = l._log_id
)
LIMIT {{ params.sql_limit }}
)
{% endset %}
{# Create the view #}
{% do run_query(create_view_query) %}
{{ log("Created view for month " ~ month.strftime('%Y-%m'), info=True) }}
{% if var("STREAMLINE_INVOKE_STREAMS", false) %}
{# Check if rows exist first #}
{% set check_rows_query %}
SELECT EXISTS(SELECT 1 FROM streamline.{{view_name}} LIMIT 1)
{% endset %}
{% set results = run_query(check_rows_query) %}
{% set has_rows = results.columns[0].values()[0] %}
{% if has_rows %}
{# Invoke streamline, if rows exist to decode #}
{% set decode_query %}
SELECT
streamline.udf_bulk_decode_logs_v2(
PARSE_JSON(
$${ "external_table": "decoded_logs",
"producer_batch_size": {{ params.producer_batch_size }},
"sql_limit": {{ params.sql_limit }},
"sql_source": "{{view_name}}",
"worker_batch_size": {{ params.worker_batch_size }} }$$
)
);
{% endset %}
{% do run_query(decode_query) %}
{{ log("Triggered decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{# Call wait since we actually did some decoding #}
{% do run_query("call system$wait(" ~ wait_time ~ ")") %}
{{ log("Completed wait after decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{% else %}
{{ log("No rows to decode for month " ~ month.strftime('%Y-%m'), info=True) }}
{% endif %}
{% endif %}
{% endfor %}
{% endif %}
{% endmacro %}
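This local macro is removed in favor of the packaged fsc_evm.decoded_logs_history invoked by the new workflows above; both versions are called the same way through run-operation. A minimal invocation sketch (backfill_mode matches the macro signature shown here; the target choice is an assumption):

# Build the monthly candidate views only; decoding is skipped unless the streams variable is set
dbt run-operation decoded_logs_history --target prod
# Full backfill: consider all ABIs and trigger decoding month by month
dbt run-operation decoded_logs_history --args '{backfill_mode: true}' --vars '{"STREAMLINE_INVOKE_STREAMS": True}' --target prod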

View File

@ -1,31 +0,0 @@
{% macro run_decoded_logs_history() %}
{% set blockchain = var('GLOBAL_PROD_DB_NAME','').lower() %}
{% set check_for_new_user_abis_query %}
select 1
from {{ ref('silver__user_verified_abis') }}
where _inserted_timestamp::date = sysdate()::date
and dayname(sysdate()) <> 'Sat'
{% endset %}
{% set results = run_query(check_for_new_user_abis_query) %}
{% if execute %}
{% set new_user_abis = results.columns[0].values()[0] %}
{% if new_user_abis %}
{% set invoke_workflow_query %}
SELECT
github_actions.workflow_dispatches(
'FlipsideCrypto',
'{{ blockchain }}' || '-models',
'dbt_run_streamline_decoded_logs_history.yml',
NULL
)
{% endset %}
{% do run_query(invoke_workflow_query) %}
{% endif %}
{% endif %}
{% endmacro %}

View File

@ -1,226 +0,0 @@
{% macro silver_traces_v1(
full_reload_start_block,
full_reload_blocks,
full_reload_mode = false,
TRACES_ARB_MODE = false,
TRACES_SEI_MODE = false,
TRACES_KAIA_MODE = false,
use_partition_key = false,
schema_name = 'bronze'
) %}
WITH bronze_traces AS (
SELECT
block_number,
{% if use_partition_key %}
partition_key,
{% else %}
_partition_by_block_id AS partition_key,
{% endif %}
VALUE :array_index :: INT AS tx_position,
DATA :result AS full_traces,
{% if TRACES_SEI_MODE %}
DATA :txHash :: STRING AS tx_hash,
{% endif %}
_inserted_timestamp
FROM
{% if is_incremental() and not full_reload_mode %}
{{ ref(
schema_name ~ '__traces'
) }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
AND DATA :result IS NOT NULL {% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% elif is_incremental() and full_reload_mode %}
{{ ref(
schema_name ~ '__traces_fr'
) }}
WHERE
{% if use_partition_key %}
partition_key BETWEEN (
SELECT
MAX(partition_key) - 100000
FROM
{{ this }}
)
AND (
SELECT
MAX(partition_key) + {{ full_reload_blocks }}
FROM
{{ this }}
)
{% else %}
_partition_by_block_id BETWEEN (
SELECT
MAX(_partition_by_block_id) - 100000
FROM
{{ this }}
)
AND (
SELECT
MAX(_partition_by_block_id) + {{ full_reload_blocks }}
FROM
{{ this }}
)
{% endif %}
{% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% else %}
{{ ref(
schema_name ~ '__traces_fr'
) }}
WHERE
{% if use_partition_key %}
partition_key <= {{ full_reload_start_block }}
{% else %}
_partition_by_block_id <= {{ full_reload_start_block }}
{% endif %}
{% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number, tx_position
ORDER BY
_inserted_timestamp DESC)) = 1
),
flatten_traces AS (
SELECT
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
partition_key,
IFF(
path IN (
'result',
'result.value',
'result.type',
'result.to',
'result.input',
'result.gasUsed',
'result.gas',
'result.from',
'result.output',
'result.error',
'result.revertReason',
'result.time',
'gasUsed',
'gas',
'type',
'to',
'from',
'value',
'input',
'error',
'output',
'time',
'revertReason'
{% if TRACES_ARB_MODE %},
'afterEVMTransfers',
'beforeEVMTransfers',
'result.afterEVMTransfers',
'result.beforeEVMTransfers'
{% endif %}
{% if TRACES_KAIA_MODE %},
'reverted',
'result.reverted'
{% endif %}
),
'ORIGIN',
REGEXP_REPLACE(REGEXP_REPLACE(path, '[^0-9]+', '_'), '^_|_$', '')
) AS trace_address,
_inserted_timestamp,
OBJECT_AGG(
key,
VALUE
) AS trace_json,
CASE
WHEN trace_address = 'ORIGIN' THEN NULL
WHEN POSITION(
'_' IN trace_address
) = 0 THEN 'ORIGIN'
ELSE REGEXP_REPLACE(
trace_address,
'_[0-9]+$',
'',
1,
1
)
END AS parent_trace_address,
SPLIT(
trace_address,
'_'
) AS trace_address_array
FROM
bronze_traces txs,
TABLE(
FLATTEN(
input => PARSE_JSON(
txs.full_traces
),
recursive => TRUE
)
) f
WHERE
f.index IS NULL
AND f.key != 'calls'
AND f.path != 'result'
{% if TRACES_ARB_MODE %}
AND f.path NOT LIKE 'afterEVMTransfers[%'
AND f.path NOT LIKE 'beforeEVMTransfers[%'
{% endif %}
{% if TRACES_KAIA_MODE %}
and f.key not in ('message', 'contract')
{% endif %}
GROUP BY
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
partition_key,
trace_address,
_inserted_timestamp
)
SELECT
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
trace_address,
parent_trace_address,
trace_address_array,
trace_json,
partition_key,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['block_number'] +
(['tx_hash'] if TRACES_SEI_MODE else ['tx_position']) +
['trace_address']
) }} AS traces_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
flatten_traces qualify(ROW_NUMBER() over(PARTITION BY traces_id
ORDER BY
_inserted_timestamp DESC)) = 1
{% endmacro %}

View File

@ -1,101 +0,0 @@
{% macro streamline_external_table_query_decoder(
source_name,
source_version
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP())
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_decoder_fr(
source_name,
source_version
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}

View File

@ -1,141 +0,0 @@
{% macro streamline_external_table_query(
source_name,
source_version,
partition_function,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_fr(
source_name,
source_version,
partition_function,
partition_join_key,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.{{ partition_join_key }}
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.{{ partition_join_key }}
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}

View File

@ -1,36 +0,0 @@
{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
{% if model_type != '' %}
{% set model_type = '_' ~ model_type %}
{% endif %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }}
{{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }}
{% if uses_receipts_by_hash %}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{{ log("", info=True) }}
{{ log("=== Source Details ===", info=True) }}
{{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }}
{{ log("", info=True) }}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@ -1,29 +0,0 @@
{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %}
{%- if flags.WHICH == 'compile' and execute -%}
{% if uses_receipts_by_hash %}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' unique_key = "' ~ config.get('unique_key') ~ '",\n' %}
{% set config_log = config_log ~ ' cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %}
{% set config_log = config_log ~ ' merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' post_hook = "' ~ post_hook ~ '",\n' %}
{% set config_log = config_log ~ ' incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' full_refresh = ' ~ full_refresh_type ~ ',\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@ -1,36 +0,0 @@
{% macro log_model_details(vars=false, params=false) %}
{%- if execute -%}
/*
DBT Model Config:
{{ model.config | tojson(indent=2) }}
*/
{% if vars is not false %}
{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( vars | tojson(indent=2), info=True) }}
{% endif %}
/*
Variables:
{{ vars | tojson(indent=2) }}
*/
{% endif %}
{% if params is not false %}
{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( params | tojson(indent=2), info=True) }}
{% endif %}
/*
Parameters:
{{ params | tojson(indent=2) }}
*/
{% endif %}
/*
Raw Code:
{{ model.raw_code }}
*/
{%- endif -%}
{% endmacro %}

View File

@ -1,55 +0,0 @@
{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("START_UP_BLOCK: " ~ min_block, info=True) }}
{{ log("", info=True) }}
{{ log("=== API Details ===", info=True) }}
{{ log("NODE_URL: " ~ node_url, info=True) }}
{{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
{{ log("", info=True) }}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }}
{{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }}
{{ log("", info=True) }}
{{ log("=== RPC Details ===", info=True) }}
{{ log(model_name ~ ": {", info=True) }}
{{ log(" method: '" ~ method ~ "',", info=True) }}
{{ log(" method_params: " ~ method_params, info=True) }}
{{ log("}", info=True) }}
{{ log("", info=True) }}
{% set params_str = streamline_params | tojson %}
{% set params_formatted = params_str | replace('{', '{\n ') | replace('}', '\n }') | replace(', ', ',\n ') %}
{# Clean up the method_params formatting #}
{% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' post_hook = fsc_utils.if_data_call_function_v2(\n' %}
{% set config_log = config_log ~ ' func = "streamline.udf_bulk_rest_api_v2",\n' %}
{% set config_log = config_log ~ ' target = "' ~ this.schema ~ '.' ~ this.identifier ~ '",\n' %}
{% set config_log = config_log ~ ' params = ' ~ params_formatted ~ '\n' %}
{% set config_log = config_log ~ ' ),\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@ -1,47 +0,0 @@
{% macro set_default_variables_streamline(model_name, model_type) %}
{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%}
{%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%}
{%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%}
{%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%}
{%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime'
else 'ORDER BY partition_key ASC, block_number ASC' -%}
{%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'node_url': node_url,
'node_secret_path': node_secret_path,
'model_quantum_state': model_quantum_state,
'testing_limit': testing_limit,
'new_build': new_build,
'order_by_clause': order_by_clause,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}
{% macro set_default_variables_bronze(source_name, model_type) %}
{%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION',
"CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)")
-%}
{%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%}
{%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%}
{%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'partition_function': partition_function,
'partition_join_key': partition_join_key,
'block_number': block_number,
'balances': balances,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}

View File

@ -1,57 +0,0 @@
{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %}
{%- set rpc_config_details = {
"blocks_transactions": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)',
"exploded_key": ['result', 'result.transactions']
},
"receipts_by_hash": {
"method": 'eth_getTransactionReceipt',
"method_params": 'ARRAY_CONSTRUCT(tx_hash)'
},
"receipts": {
"method": 'eth_getBlockReceipts',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))',
"exploded_key": ['result'],
"lambdas": 2
},
"traces": {
"method": 'debug_traceBlockByNumber',
"method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))",
"exploded_key": ['result'],
"lambdas": 2
},
"confirm_blocks": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)'
}
} -%}
{%- set rpc_config = rpc_config_details[model_name.lower()] -%}
{%- set params = {
"external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()),
"sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"worker_batch_size": var(
(model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(),
(2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1))
),
"sql_source": (model_name ~ '_' ~ model_type).lower(),
"method": rpc_config['method'],
"method_params": rpc_config['method_params']
} -%}
{%- if rpc_config.get('exploded_key') is not none -%}
{%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%}
{%- endif -%}
{%- if rpc_config.get('lambdas') is not none -%}
{%- do params.update({"lambdas": rpc_config['lambdas']}) -%}
{%- endif -%}
{{ return(params) }}
{% endmacro %}

View File

@ -1,21 +0,0 @@
{% macro create_aws_base_api() %}
{{ log(
"Creating integration for target:" ~ target
) }}
{% if target.name == "prod" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_base_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/base-api-prod-rolesnowflakeudfsAF733095-FFKP94OAGPXW' api_allowed_prefixes = (
'https://u27qk1trpc.execute-api.us-east-1.amazonaws.com/prod/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% elif target.name == "dev" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_base_api_dev api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/base-api-dev-rolesnowflakeudfsAF733095-I354FW5312ZX' api_allowed_prefixes = (
'https://rijt3fsk7b.execute-api.us-east-1.amazonaws.com/dev/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}

View File

@ -1,24 +0,0 @@
{% macro create_udtf_get_base_table(schema) %}
create or replace function {{ schema }}.udtf_get_base_table(max_height integer)
returns table (height number)
as
$$
with base as (
select
row_number() over (
order by
seq4()
) as id
from
table(generator(rowcount => 100000000))
)
select
id as height
from
base
where
id <= max_height
$$
;
{% endmacro %}
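Once created, the UDTF can be queried directly to generate a contiguous height range; a minimal sketch, assuming it was deployed to the streamline schema:
-- returns heights 1 through 1,000,000
SELECT
    height
FROM
    TABLE(streamline.udtf_get_base_table(1000000));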

View File

@ -1,67 +0,0 @@
{% macro create_udf_get_chainhead() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead() returns variant api_integration =
{% if target.name == "prod" %}
aws_base_api AS 'https://u27qk1trpc.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
{% else %}
aws_base_api_dev AS 'https://rijt3fsk7b.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
{%- endif %};
{% endmacro %}
{% macro create_udf_bulk_json_rpc() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(
json variant
) returns text api_integration = {% if target.name == "prod" %}
aws_base_api AS 'https://u27qk1trpc.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_json_rpc'
{% else %}
aws_base_api_dev AS 'https://rijt3fsk7b.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_json_rpc'
{%- endif %};
{% endmacro %}
{% macro create_udf_bulk_get_traces() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_get_traces(
json variant
) returns text api_integration = {% if target.name == "prod" %}
aws_base_api AS 'https://u27qk1trpc.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_get_traces'
{% else %}
aws_base_api_dev AS 'https://rijt3fsk7b.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_get_traces'
{%- endif %};
{% endmacro %}
{% macro create_udf_decode_array_string() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
abi ARRAY,
DATA STRING
) returns ARRAY api_integration = {% if target.name == "prod" %}
aws_base_api AS 'https://u27qk1trpc.execute-api.us-east-1.amazonaws.com/prod/decode_function'
{% else %}
aws_base_api_dev AS 'https://rijt3fsk7b.execute-api.us-east-1.amazonaws.com/dev/decode_function'
{%- endif %};
{% endmacro %}
{% macro create_udf_decode_array_object() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
abi ARRAY,
DATA OBJECT
) returns ARRAY api_integration = {% if target.name == "prod" %}
aws_base_api AS 'https://u27qk1trpc.execute-api.us-east-1.amazonaws.com/prod/decode_log'
{% else %}
aws_base_api_dev AS 'https://rijt3fsk7b.execute-api.us-east-1.amazonaws.com/dev/decode_log'
{%- endif %};
{% endmacro %}
{% macro create_udf_bulk_decode_logs() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_decode_logs(
json OBJECT
) returns ARRAY api_integration = {% if target.name == "prod" %}
aws_base_api AS 'https://u27qk1trpc.execute-api.us-east-1.amazonaws.com/prod/bulk_decode_logs'
{% else %}
aws_base_api_dev AS 'https://rijt3fsk7b.execute-api.us-east-1.amazonaws.com/dev/bulk_decode_logs'
{%- endif %};
{% endmacro %}
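Once these external functions exist, they can be invoked like any other Snowflake function; for example, a quick chainhead check, run in an environment where the matching API integration has been created:
SELECT
    streamline.udf_get_chainhead() AS chainhead;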

View File

@ -1,25 +0,0 @@
{% test missing_decoded_logs(model) %}
SELECT
l.block_number,
CONCAT(
l.tx_hash,
'-',
l.event_index
) AS _log_id
FROM
{{ ref('core__fact_event_logs') }}
l
LEFT JOIN {{ model }}
d
ON l.block_number = d.block_number
AND CONCAT(
l.tx_hash,
'-',
l.event_index
) = d._log_id
WHERE
l.contract_address = LOWER('0x4200000000000000000000000000000000000006') -- WETH
AND l.topics [0] :: STRING = '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' -- Transfer
AND l.block_timestamp BETWEEN DATEADD('hour', -48, SYSDATE())
AND DATEADD('hour', -6, SYSDATE())
AND d._log_id IS NULL {% endtest %}

View File

@ -1,104 +0,0 @@
{% macro missing_txs(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_full') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
(
model_tx_hash IS NULL
OR model_block_number IS NULL
)
{% endmacro %}
{% macro recent_missing_txs(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
(model_tx_hash IS NULL
OR model_block_number IS NULL)
AND base_tx_hash <> '0x13b126388e78adc0fff1b40888b2cd87e6ec0d6c3c9838ee26119b81173bcf25'
{% endmacro %}
{% macro missing_confirmed_txs(
model1,
model2
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
block_hash AS base_block_hash,
tx_hash AS base_tx_hash
FROM
{{ model1 }}
),
model_name AS (
SELECT
block_number AS model_block_number,
block_hash AS model_block_hash,
tx_hash AS model_tx_hash
FROM
{{ model2 }}
)
SELECT
DISTINCT base_block_number AS block_number
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
AND base_block_hash = model_block_hash
WHERE
model_tx_hash IS NULL
AND model_block_number <= (
SELECT
MAX(base_block_number)
FROM
txs_base
)
{% endmacro %}
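These macros are intended to be wrapped by singular tests; a sketch of such a test file, where the file path and the ref'd model are illustrative only:
-- tests/missing_txs.sql (hypothetical path)
{{ missing_txs(ref('silver__transactions')) }}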

View File

@ -1,47 +0,0 @@
{% test vertex_missing_products(
model,
filter) %}
with recent_records as (
select * from {{model}}
where modified_timestamp >= SYSDATE() - INTERVAL '7 days'
),
invalid_product_ids as (
select distinct product_id
from {{ ref('silver__vertex_dim_products') }}
where product_id not in (select product_id from recent_records)
AND block_timestamp < sysdate() - INTERVAL '2 days'
{% if filter %}
AND {{ filter }}
{% endif %}
)
select *
from invalid_product_ids
{% endtest %}
{% test vertex_product_level_recency(
model,
filter) %}
with recent_records as (
select distinct(product_id) from {{model}}
where block_timestamp >= SYSDATE() - INTERVAL '7 days'
),
invalid_product_ids as (
select *
from {{ ref('silver__vertex_dim_products') }}
where product_id not in (select product_id from recent_records)
AND block_timestamp < sysdate() - INTERVAL '2 days'
{% if filter %}
AND {{ filter }}
{% endif %}
)
select *
from invalid_product_ids
{% endtest %}

View File

@ -1,78 +0,0 @@
{% macro if_data_call_function(
func,
target
) %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: Calling udf " ~ func ~ " on " ~ target,
True
) }}
{% endif %}
SELECT
{{ func }}
WHERE
EXISTS(
SELECT
1
FROM
{{ target }}
LIMIT
1
)
{% else %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: NOOP",
False
) }}
{% endif %}
SELECT
NULL
{% endif %}
{% endmacro %}
{% macro if_data_call_wait() %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% set query %}
SELECT
1
WHERE
EXISTS(
SELECT
1
FROM
{{ model.schema ~ "." ~ model.alias }}
LIMIT
1
) {% endset %}
{% if execute %}
{% set results = run_query(
query
) %}
{% if results %}
{{ log(
"Waiting...",
info = True
) }}
{% set wait_query %}
SELECT
system$wait(
{{ var(
"WAIT",
600
) }}
) {% endset %}
{% do run_query(wait_query) %}
{% else %}
SELECT
NULL;
{% endif %}
{% endif %}
{% endif %}
{% endmacro %}
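For context, a minimal sketch of how a streamline request model might use if_data_call_function as a post-hook; the UDF call, the ref and the LIMIT are placeholders for illustration, not the project's actual wiring, and STREAMLINE_INVOKE_STREAMS must be defined as a project var:
{# hypothetical request model #}
{{ config(
    materialized = 'view',
    post_hook = if_data_call_function(
        func = "streamline.udf_bulk_json_rpc(PARSE_JSON('{}'))",
        target = "{{ this.schema }}.{{ this.identifier }}"
    )
) }}
SELECT
    block_number
FROM
    {{ ref('streamline__blocks') }} -- placeholder ref
LIMIT 10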

41
makefile Normal file
View File

@ -0,0 +1,41 @@
DBT_TARGET ?= dev
cleanup_time:
rm -f package-lock.yml && dbt clean && dbt deps
deploy_github_actions:
dbt run -s livequery_base.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET)
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET)
dbt run-operation fsc_evm.create_gha_tasks --vars '{"START_GHA_TASKS":False}' -t $(DBT_TARGET)
deploy_new_github_action:
dbt run-operation fsc_evm.drop_github_actions_schema -t $(DBT_TARGET)
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET)
dbt run-operation fsc_evm.create_gha_tasks --vars '{"START_GHA_TASKS":False}' -t $(DBT_TARGET)
deploy_livequery:
dbt run-operation fsc_evm.drop_livequery_schemas --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
dbt run -m livequery_base.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
deploy_chain_phase_1:
dbt run -m livequery_base.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
dbt run-operation fsc_evm.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
dbt run-operation fsc_evm.call_sample_rpc_node -t $(DBT_TARGET)
dbt run -m "fsc_evm,tag:phase_1" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET)
# kick chainhead workflow
# wait ~10 minutes
# run deploy_chain_phase_2
deploy_chain_phase_2:
dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true}' -t $(DBT_TARGET)
$(MAKE) deploy_github_actions DBT_TARGET=$(DBT_TARGET)
# tasks set to SUSPEND by default
# kick alter_gha_task workflow to RESUME individual tasks, as needed
deploy_chain_phase_3:
dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true}' -t $(DBT_TARGET)
# kick alter_gha_task workflow to RESUME individual tasks, as needed
.PHONY: deploy_github_actions cleanup_time deploy_new_github_action deploy_chain_phase_1 deploy_chain_phase_2 deploy_chain_phase_3 deploy_livequery
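Usage note: with the DBT_TARGET variable above, the phased deployment is invoked as, for example, make deploy_chain_phase_1 DBT_TARGET=prod, followed (after the chainhead workflow has completed) by make deploy_chain_phase_2 DBT_TARGET=prod.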

View File

@ -1,87 +0,0 @@
{{ config (
materialized = "ephemeral"
) }}
WITH retry AS (
SELECT
contract_address,
GREATEST(
latest_call_block,
latest_event_block
) AS block_number,
total_interaction_count
FROM
{{ ref("silver__relevant_contracts") }}
r
LEFT JOIN {{ source(
'base_silver',
'verified_abis'
) }}
v USING (contract_address)
WHERE
r.total_interaction_count >= 250 -- high interaction count
AND GREATEST(
max_inserted_timestamp_logs,
max_inserted_timestamp_traces
) >= CURRENT_DATE - INTERVAL '30 days' -- recent activity
AND v.contract_address IS NULL -- no verified abi
AND r.contract_address NOT IN (
SELECT
contract_address
FROM
{{ source(
'base_bronze_api',
'contract_abis'
) }}
WHERE
_inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
AND abi_data :data :result :: STRING <> 'Max rate limit reached'
)
ORDER BY
total_interaction_count DESC
LIMIT
5
), FINAL AS (
SELECT
proxy_address AS contract_address,
start_block AS block_number
FROM
{{ ref("silver__proxies") }}
p
JOIN retry r USING (contract_address)
LEFT JOIN {{ source(
'base_silver',
'verified_abis'
) }}
v
ON v.contract_address = p.proxy_address
WHERE
v.contract_address IS NULL
AND p.contract_address NOT IN (
SELECT
contract_address
FROM
{{ source(
'base_bronze_api',
'contract_abis'
) }}
WHERE
_inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
AND abi_data :data :result :: STRING <> 'Max rate limit reached'
)
UNION ALL
SELECT
contract_address,
block_number
FROM
retry
)
SELECT
*
FROM
FINAL qualify ROW_NUMBER() over (
PARTITION BY contract_address
ORDER BY
block_number DESC
) = 1

View File

@ -1,67 +0,0 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false,
tags = ['non_realtime']
) }}
WITH base AS (
SELECT
contract_address
FROM
{{ ref('silver__relevant_contracts') }}
WHERE
total_interaction_count >= 10000
{% if is_incremental() %}
and contract_address not in (
SELECT
contract_address
FROM
{{ this }}
WHERE
abi_data :data :result :: STRING <> 'Max rate limit reached')
{% endif %}
order by total_interaction_count desc
LIMIT
400
), all_contracts AS (
SELECT
contract_address
FROM
base
UNION
SELECT
contract_address
FROM
{{ ref('_retry_abis') }}
),
row_nos AS (
SELECT
contract_address,
ROW_NUMBER() over (
ORDER BY
contract_address
) AS row_no
FROM
all_contracts
),
batched AS ({% for item in range(501) %}
SELECT
rn.contract_address, live.udf_api('GET', CONCAT('https://api.basescan.org/api?module=contract&action=getabi&address=', rn.contract_address, '&apikey={key}'),{ 'User-Agent': 'FlipsideStreamline' },{}, 'Vault/prod/block_explorers/base_scan') AS abi_data, SYSDATE() AS _inserted_timestamp
FROM
row_nos rn
WHERE
row_no = {{ item }}
{% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %})
SELECT
contract_address,
abi_data,
_inserted_timestamp
FROM
batched
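A downstream consumer would typically unpack the ABI payload and filter out rate-limited responses, mirroring the filters used above; a minimal sketch:
SELECT
    contract_address,
    abi_data :data :result :: STRING AS abi,
    _inserted_timestamp
FROM
    {{ ref('bronze_api__contract_abis') }}
WHERE
    abi_data :data :result :: STRING <> 'Max rate limit reached'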

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: bronze_api__contract_abis
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARCHAR
- dbt_expectations.expect_column_values_to_match_regex:
regex: "^(0x)[0-9a-fA-F]{40}$"

View File

@ -1,130 +0,0 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false,
tags = ['non_realtime']
) }}
WITH base AS (
SELECT
contract_address,
latest_event_block AS latest_block
FROM
{{ ref('silver__relevant_contracts') }}
WHERE
total_event_count >= 25
{% if is_incremental() %}
AND contract_address NOT IN (
SELECT
contract_address
FROM
{{ this }}
)
{% endif %}
ORDER BY
total_event_count DESC
LIMIT
100
), function_sigs AS (
SELECT
'0x313ce567' AS function_sig,
'decimals' AS function_name
UNION
SELECT
'0x06fdde03',
'name'
UNION
SELECT
'0x95d89b41',
'symbol'
),
all_reads AS (
SELECT
*
FROM
base
JOIN function_sigs
ON 1 = 1
),
ready_reads AS (
SELECT
contract_address,
latest_block,
function_sig,
RPAD(
function_sig,
64,
'0'
) AS input,
utils.udf_json_rpc_call(
'eth_call',
[{'to': contract_address, 'from': null, 'data': input}, utils.udf_int_to_hex(latest_block)],
concat_ws(
'-',
contract_address,
input,
latest_block
)
) AS rpc_request
FROM
all_reads
),
batch_reads AS (
SELECT
ARRAY_AGG(rpc_request) AS batch_rpc_request
FROM
ready_reads
),
node_call AS (
SELECT
*,
live.udf_api(
'POST',
CONCAT(
'{service}',
'/',
'{Authentication}'
),{},
batch_rpc_request,
'Vault/prod/base/quicknode/mainnet'
) AS response
FROM
batch_reads
WHERE
EXISTS (
SELECT
1
FROM
ready_reads
LIMIT
1
)
), flat_responses AS (
SELECT
VALUE :id :: STRING AS call_id,
VALUE :result :: STRING AS read_result
FROM
node_call,
LATERAL FLATTEN (
input => response :data
)
)
SELECT
SPLIT_PART(
call_id,
'-',
1
) AS contract_address,
SPLIT_PART(
call_id,
'-',
3
) AS block_number,
LEFT(SPLIT_PART(call_id, '-', 2), 10) AS function_sig,
NULL AS function_input,
read_result,
SYSDATE() :: TIMESTAMP AS _inserted_timestamp
FROM
flat_responses
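The read_result values above are raw hex strings returned by eth_call; a hedged sketch of decoding the decimals() read downstream, assuming a hex-to-integer helper such as utils.udf_hex_to_int is available in this project:
SELECT
    contract_address,
    utils.udf_hex_to_int(read_result) :: INT AS decimals -- helper assumed to exist
FROM
    {{ ref('bronze_api__token_reads') }}
WHERE
    function_sig = '0x313ce567' -- decimals()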

View File

@ -1,19 +0,0 @@
version: 2
models:
- name: bronze_api__token_reads
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- CONTRACT_ADDRESS
- FUNCTION_SIG
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ

View File

@ -2,7 +2,7 @@
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "state_block_number",
tags = ['ethereum','non_realtime']
tags = ['bronze','ethereum']
) }}
SELECT

View File

@ -1,25 +0,0 @@
{{ config(
materialized = 'view'
) }}
SELECT
system_created_at,
insert_date,
blockchain,
address,
creator,
label_type,
label_subtype,
address_name,
project_name,
_is_deleted,
modified_timestamp,
labels_combined_id
FROM
{{ source(
'silver_crosschain',
'labels_combined'
) }}
WHERE
blockchain = 'base'
AND address LIKE '0x%'

View File

@ -1,80 +0,0 @@
{{ config (
materialized = "view",
tags = ['overflowed_traces']
) }}
{% for item in range(
1,
11
) %}
SELECT
o.file_name,
f.block_number,
f.index_vals,
f.path,
f.key,
f.value_
FROM
(
SELECT
file_name,
file_url,
index_cols,
[overflowed_block, overflowed_tx] AS index_vals
FROM
(
SELECT
block_number,
POSITION,
file_name,
file_url,
index_cols,
VALUE [0] AS overflowed_block,
VALUE [1] AS overflowed_tx,
block_number = overflowed_block
AND POSITION = overflowed_tx AS missing
FROM
(
SELECT
block_number,
POSITION,
file_name,
file_url,
index_cols,
utils.udf_detect_overflowed_responses(
file_url,
index_cols
) AS index_vals
FROM
{{ ref("bronze__potential_overflowed_traces") }}
WHERE
row_no = {{ item }}
),
LATERAL FLATTEN (
input => index_vals
)
)
WHERE
missing = TRUE
) o,
TABLE(
utils.udtf_flatten_overflowed_responses(
o.file_url,
o.index_cols,
[o.index_vals]
)
) f
WHERE
NOT IS_OBJECT(
f.value_
)
AND NOT IS_ARRAY(
f.value_
)
AND NOT IS_NULL_VALUE(
f.value_
) {% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}

View File

@ -1,80 +0,0 @@
{{ config (
materialized = "view",
tags = ['overflowed_traces']
) }}
WITH impacted_blocks AS (
SELECT
blocks_impacted_array
FROM
{{ ref("silver_observability__traces_completeness") }}
ORDER BY
test_timestamp DESC
LIMIT
1
), all_missing AS (
SELECT
DISTINCT VALUE :: INT AS block_number
FROM
impacted_blocks,
LATERAL FLATTEN (
input => blocks_impacted_array
)
),
all_txs AS (
SELECT
block_number,
POSITION AS tx_position,
tx_hash
FROM
{{ ref("silver__transactions") }}
JOIN all_missing USING (block_number)
),
missing_txs AS (
SELECT
DISTINCT txs.block_number,
txs.tx_position,
file_name
FROM
all_txs txs
LEFT JOIN {{ source(
'base_gold',
'fact_traces'
) }}
tr2 USING (
block_number,
tx_position
)
JOIN {{ ref("streamline__traces_complete") }} USING (block_number)
LEFT JOIN {{ source(
'base_silver',
'overflowed_traces'
) }}
ot USING (
block_number,
tx_position
)
WHERE
tr2.block_number IS NULL
AND ot.block_number IS NULL
)
SELECT
block_number,
tx_position AS POSITION,
file_name,
build_scoped_file_url(
@streamline.bronze.BASE_SERVERLESS_PROD,
file_name
) AS file_url,
['block_number', 'array_index'] AS index_cols,
ROW_NUMBER() over (
ORDER BY
block_number ASC,
POSITION ASC
) AS row_no
FROM
missing_txs
ORDER BY
block_number ASC,
POSITION ASC

View File

@ -1,26 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
asset_id,
symbol,
NAME,
decimals,
blockchain,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_native_asset_metadata_id,
_invocation_id
FROM
{{ source(
'silver_crosschain',
'complete_native_asset_metadata'
) }}
WHERE
blockchain = 'ethereum'
AND symbol = 'ETH'

View File

@ -1,29 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
HOUR,
asset_id,
symbol,
NAME,
decimals,
price,
blockchain,
is_imputed,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_native_prices_id,
_invocation_id
FROM
{{ source(
'silver_crosschain',
'complete_native_prices'
) }}
WHERE
blockchain = 'ethereum'
AND symbol = 'ETH'

View File

@ -1,26 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
asset_id,
token_address,
NAME,
symbol,
platform,
platform_id,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_provider_asset_metadata_id,
_invocation_id
FROM
{{ source(
'silver_crosschain',
'complete_provider_asset_metadata'
) }}
WHERE
platform = 'Base'
-- platforms specific to Base

View File

@ -1,24 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
asset_id,
recorded_hour,
OPEN,
high,
low,
CLOSE,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_provider_prices_id,
_invocation_id
FROM
{{ source(
'silver_crosschain',
'complete_provider_prices'
) }}
-- prices for all ids

View File

@ -1,28 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
token_address,
asset_id,
symbol,
NAME,
decimals,
blockchain,
blockchain_name,
blockchain_id,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_token_asset_metadata_id,
_invocation_id
FROM
{{ source(
'silver_crosschain',
'complete_token_asset_metadata'
) }}
WHERE
blockchain = 'base'

View File

@ -1,31 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
HOUR,
token_address,
asset_id,
symbol,
NAME,
decimals,
price,
blockchain,
blockchain_name,
blockchain_id,
is_imputed,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_token_prices_id,
_invocation_id
FROM
{{ source(
'silver_crosschain',
'complete_token_prices'
) }}
WHERE
blockchain = 'base'

View File

@ -1,5 +0,0 @@
{% docs base_max_block %}
The max block on Base this batch relates to.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_min_block %}
The min block on Base this batch relates to.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_batch_size %}
Total Base Txs included within batch.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_batch_root %}
Root of the batch, either for submission or state.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_l1_block_no %}
The Ethereum block number that contained the batch.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_l1_block_time %}
The timestamp of the Ethereum block that contained this batch.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_l1_fee_scalar %}
This value covers the change in L1 gas price between the time the transaction is submitted and when it is published.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_l1_gas_price %}
The gas price for L1 transactions when the transaction was processed.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_l1_gas_used %}
The gas used on L1 to publish the transaction.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_l1_state_batch %}
The batch index of when this block was included in the Ethereum state root. This column will be deprecated 8/7 and will be consolidated into an array column consisting of all L1 submission details.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_l1_state_tx_hash %}
The L1 tx hash of when this block was included in the Ethereum state root. This column will be deprecated 8/7 and will be consolidated into an array column consisting of all L1 submission details.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_l1_sub_batch %}
The batch index of when this block was submitted to L1. This column will be deprecated 8/7 and will be consolidated into an array column consisting of all L1 submission details.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_l1_sub_tx_hash %}
The L1 tx hash of when this block was submitted to L1. This column will be deprecated 8/7 and will be consolidated into an array column consisting of all L1 submission details.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_prev_total_elements %}
Confirmed blocks prior to this batch.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_block_header_json %}
This JSON column contains the block header details.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_blockchain %}
The blockchain on which transactions are being confirmed.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_blocks_hash %}
The hash of the block header for a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_blocks_nonce %}
Block nonce is a value used during mining to demonstrate proof of work for a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_blocks_table_doc %}
This table contains block level data for the Base Blockchain. This table can be used to analyze trends at a block level, for example gas fees vs. total transactions over time. For more information on EVM transactions, please see [Etherscan Resources](https://etherscan.io/directory/Learning_Resources/Ethereum) or [The Ethereum Organization](https://ethereum.org/en/developers/docs/blocks/)
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_difficulty %}
The effort required to mine the block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_extra_data %}
Any data included by the validator for a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_gas_limit %}
The maximum amount of gas that all transactions in the block combined are allowed to consume, i.e. the block gas limit.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_gas_used %}
Total gas used in the block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_miner %}
Miner who successfully added a given block to the blockchain.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_network %}
The network on the blockchain used by a transaction.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_parent_hash %}
The hash of the block from which a given block is generated. Also known as the parent block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_receipts_root %}
The root of the receipts trie for the block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_sha3_uncles %}
The SHA-3 (Keccak-256) hash of the uncles list in the block header.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_size %}
The block size in bytes, which in practice is bounded by the block's gas limit.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_total_difficulty %}
Total difficulty of the chain at a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_tx_count %}
Total number of transactions within a block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs base_uncle_blocks %}
Uncle blocks occur when two blocks are mined and broadcasted at the same time, with the same block number. The block validated across the most nodes will be added to the primary chain, and the other one becomes an uncle block. Miners do receive rewards for uncle blocks.
{% enddocs %}

View File

@ -1,83 +0,0 @@
{% docs evm_bridge_table_doc %}
A convenience table that aggregates bridge activity from event_logs, traces and transfers, including bridge deposits and transfers sent from the following protocols: ACROSS, AXELAR, CELER, CBRIDGE, DLN, DEBRIDGE, HOP, MESON, STARGATE, SYMBIOSIS, SYNAPSE, WORMHOLE along with other helpful columns, including an amount USD where available. Note, this table only includes records for the protocols listed above with live, onchain bridge activity and may not represent the complete bridging picture.
{% enddocs %}
{% docs evm_bridge_platform %}
The platform or protocol from which the bridge transaction or event originates.
{% enddocs %}
{% docs evm_bridge_origin_from %}
The from address where the transaction originated from. This may be an EOA or contract address, however in most cases this is the user that initiated the bridge deposit or transfer.
{% enddocs %}
{% docs evm_bridge_sender %}
The address that initiated the bridge deposit or transfer. This address is the sender of the tokens/assets being bridged to the destination chain. This may be an EOA or contract address.
{% enddocs %}
{% docs evm_bridge_receiver %}
The designated address set to receive the deposit or transfer. This may be an EOA or contract address.
{% enddocs %}
{% docs evm_bridge_destination_chain_receiver %}
The designated address set to receive the bridged tokens on the target chain after the completion of the bridge transaction. For non-evm chains, the hex address is decoded/encoded to match the data format of the destination chain, where possible. This may be an EOA or contract address.
{% enddocs %}
{% docs evm_bridge_destination_chain %}
The name of the blockchain network to which the assets are being bridged. This may be any EVM-compatible chain or other blockchain network that the bridging protocol supports.
{% enddocs %}
{% docs evm_bridge_destination_chain_id %}
The numeric identifier associated with the destination blockchain network. This is specific to the chain and helps in uniquely identifying it.
{% enddocs %}
{% docs evm_bridge_address %}
The address of the contract responsible for handling the bridge deposit or transfer. This contract mediates the transfer and ensures that assets are sent and received appropriately.
{% enddocs %}
{% docs evm_bridge_token_address %}
The address associated with the token that is being bridged. It provides a unique identifier for the token within its origin blockchain.
{% enddocs %}
{% docs evm_bridge_token_symbol %}
The symbol representing the token being bridged. This provides a shorthand representation of the token.
{% enddocs %}
{% docs evm_bridge_amount_unadj %}
The raw, non-decimal adjusted amount of tokens involved in the bridge transaction.
{% enddocs %}
{% docs evm_bridge_amount %}
The decimal adjusted amount of tokens involved in the bridge transaction, where available.
{% enddocs %}
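As a worked example of the decimal adjustment described above (token and values are illustrative): a raw amount_unadj of 2500000 for a token with 6 decimals corresponds to amount = 2500000 / POW(10, 6) = 2.5.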
{% docs evm_bridge_amount_usd %}
The value of the bridged tokens in USD at the time of the bridge transaction, where available.
{% enddocs %}

View File

@ -1,381 +0,0 @@
{% docs complete_lending_borrow_rate_mode %}
The rate mode the user is swapping from. Stable: 1, Variable: 2. Borrowers can switch between the stable and variable rate at any time. Stable rates act as a fixed rate in the short term, but can be rebalanced in the long term in response to changes in market conditions. The variable rate is based on supply and demand. The stable rate, as its name indicates, remains relatively stable and is the better option for planning how much interest you will have to pay. The variable rate changes over time and may be the optimal rate depending on market conditions.
{% enddocs %}
{% docs complete_lending_borrow_rate_stable %}
The stable interest rate for borrowing assets.
{% enddocs %}
{% docs complete_lending_borrow_rate_variable %}
The variable interest rate for borrowing assets.
{% enddocs %}
{% docs complete_lending_collateral_complete_lending_token %}
The interest bearing token that's burned when a liquidation occurs.
{% enddocs %}
{% docs complete_lending_collateral_asset %}
The asset provided as collateral, which can be liquidated.
{% enddocs %}
{% docs complete_lending_data_provider %}
The protocol data provider contract address.
{% enddocs %}
{% docs complete_lending_debt_complete_lending_token %}
The interest bearing token representing the debt.
{% enddocs %}
{% docs complete_lending_debt_asset %}
The debt asset, which the user borrowed.
{% enddocs %}
{% docs complete_lending_debt_to_cover_amount %}
The amount of debt the user must cover.
{% enddocs %}
{% docs complete_lending_debt_to_cover_amount_usd %}
The amount of debt the user must cover, valued in USD.
{% enddocs %}
{% docs complete_lending_depositor_address %}
The depositor's address.
{% enddocs %}
{% docs complete_lending_end_voting_period %}
The block number in which the voting period ends.
{% enddocs %}
{% docs complete_lending_flashloan_amount %}
The amount of assets flash loaned.
{% enddocs %}
{% docs complete_lending_flashloan_amount_usd %}
The value of the flash loan amount, in USD.
{% enddocs %}
{% docs complete_lending_governance_contract %}
The governance contract address.
{% enddocs %}
{% docs complete_lending_initiator_address %}
The address that initiated the flash loan.
{% enddocs %}
{% docs complete_lending_issued_tokens %}
The amount of tokens that the user is depositing.
{% enddocs %}
{% docs complete_lending_lending_pool_contract %}
The address of the lending pool.
{% enddocs %}
{% docs complete_lending_liquidated_amount %}
The amount of asset liquidated.
{% enddocs %}
{% docs complete_lending_liquidated_amount_usd %}
The value of the liquidated asset, in USD.
{% enddocs %}
{% docs complete_lending_liquidator %}
The address that initiated the liquidation call.
{% enddocs %}
{% docs complete_lending_market %}
The asset contract for the applicable market.
{% enddocs %}
{% docs complete_lending_payer %}
The address that initiated the repayment.
{% enddocs %}
{% docs complete_lending_premium_amount %}
The flash loan fee, changeable via the normal governance process, decimal adjusted.
{% enddocs %}
{% docs complete_lending_premium_amount_usd %}
The flash loan fee, valued in USD.
{% enddocs %}
{% docs complete_lending_proposal_id %}
The unique ID representing a proposal.
{% enddocs %}
{% docs complete_lending_proposal_tx %}
The transaction confirming a proposal submission.
{% enddocs %}
{% docs complete_lending_proposer %}
The user's address that submitted the proposal.
{% enddocs %}
{% docs complete_lending_repayed_tokens %}
The amount of tokens repaid.
{% enddocs %}
{% docs complete_lending_repayed_usd %}
The value of repaid tokens, in USD.
{% enddocs %}
{% docs complete_lending_stable_debt_token_address %}
Debt tokens are interest-accruing tokens that are minted and burned on borrow and repay, representing a debt to the protocol with a stable interest rate.
{% enddocs %}
{% docs complete_lending_start_voting_period %}
The block number in which the voting period begins.
{% enddocs %}
{% docs complete_lending_status %}
The proposal's status.
{% enddocs %}
{% docs complete_lending_supplied_usd %}
The value of the asset in USD that the user is depositing.
{% enddocs %}
{% docs complete_lending_supply_rate %}
The interest rate for supplying assets to the protocol.
{% enddocs %}
{% docs complete_lending_support %}
A value indicating the voter's position (For: true, Against: false).
{% enddocs %}
{% docs complete_lending_target_address %}
The address receiving the flash loan.
{% enddocs %}
{% docs complete_lending_targets %}
List of the targeted addresses by proposal transactions.
{% enddocs %}
{% docs complete_lending_token %}
The interest bearing token contract.
{% enddocs %}
{% docs complete_lending_total_liquidity_token %}
The total supply of liquidity tokens.
{% enddocs %}
{% docs complete_lending_total_liquidity_usd %}
The total value of liquidity tokens, in USD.
{% enddocs %}
{% docs complete_lending_total_stable_debt_token %}
The total supply of debt tokens, representing a debt to the protocol with a stable interest rate.
{% enddocs %}
{% docs complete_lending_total_stable_debt_usd %}
The total USD value of debt tokens, representing a debt to the protocol with a stable interest rate.
{% enddocs %}
{% docs complete_lending_total_variable_debt_token %}
The total supply of debt tokens, representing a debt to the protocol with a variable interest rate.
{% enddocs %}
{% docs complete_lending_total_variable_debt_usd %}
The total USD value of debt tokens, representing a debt to the protocol with a variable interest rate.
{% enddocs %}
{% docs complete_lending_utilization_rate %}
The percentage of assets loaned out.
{% enddocs %}
{% docs complete_lending_variable_debt_token_address %}
Debt tokens are interest-accruing tokens that are minted and burned on borrow and repay, representing a debt to the protocol with a variable interest rate.
{% enddocs %}
{% docs complete_lending_version %}
The contract version. Example: Aave AMM, Aave v1, Aave v2
{% enddocs %}
{% docs complete_lending_withdrawn_tokens %}
The amount of tokens withdrawn.
{% enddocs %}
{% docs complete_lending_withdrawn_usd %}
The value of withdrawn tokens, in USD.
{% enddocs %}
{% docs complete_lending_platform %}
The specific protocol where lending event occurred.
{% enddocs %}
{% docs complete_lending_protocol_token %}
The protocol's specific lending asset token, e.g. cWBTC or aETHUni.
{% enddocs %}
{% docs complete_lending_borrower %}
Address that initiated the borrow event.
{% enddocs %}
{% docs complete_lending_amount %}
The decimal adjusted amount of tokens involved in the lending transaction, where available.
{% enddocs %}
{% docs complete_lending_amount_usd %}
The value of the tokens in USD at the time of the lending transaction, where available.
{% enddocs %}
{% docs complete_lending_token_address %}
The address of the token associated with the lending action.
{% enddocs %}
{% docs complete_lending_token_symbol %}
The symbol of the token associated with the lending action.
{% enddocs %}
{% docs complete_lending_depositor %}
Address that initiated a deposit event.
{% enddocs %}
{% docs complete_lending_amount_unadj %}
The non-decimal adjusted amount of tokens involved in the lending transaction.
{% enddocs %}
{% docs complete_lending_premium_amount_unadj %}
The flash loan fee, changeable via the normal governance process, non-decimal adjusted.
{% enddocs %}
{% docs complete_lending_flashloan_amount_unadj %}
The amount of assets flash loaned, non-decimal adjusted.
{% enddocs %}
{% docs complete_lending_flashloan_token %}
The flashloaned token address.
{% enddocs %}
{% docs complete_lending_flashloan_token_symbol %}
The flashloaned token symbol.
{% enddocs %}
{% docs borrower %}
The address of the user who is borrowing or repaying the loan, depending on the action.
{% enddocs %}

Some files were not shown because too many files have changed in this diff.