AN-5995/pol-migration (#437)

* initial set up

* macros and workflows

* wh

* observ exclusions

* defi and nft tags

* protocol tags

* sl

* refs

* docs

* package

* package

* 64

* curated tweaks

* sources

* v69

* v70
drethereum 2025-05-15 09:12:03 -06:00 committed by GitHub
parent 3327061c16
commit 8bbd8ac3c5
609 changed files with 1200 additions and 19117 deletions

View File

@ -0,0 +1,27 @@
name: dbt_alter_all_gha_tasks
run-name: dbt_alter_all_gha_tasks
on:
workflow_dispatch:
branches:
- "main"
inputs:
task_action:
type: choice
description: Action to perform on all tasks
required: true
options:
- RESUME
- SUSPEND
default: RESUME
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_all_gha_tasks.yml@pre-release/v4-beta
with:
task_action: ${{ inputs.task_action }}
target: prod
secrets: inherit
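For reference, this workflow can be dispatched manually from the GitHub CLI; a minimal sketch, assuming the file is saved as .github/workflows/dbt_alter_all_gha_tasks.yml (the filename itself is not shown in this diff):

  gh workflow run dbt_alter_all_gha_tasks.yml -f task_action=SUSPEND --ref main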

View File

@ -0,0 +1,32 @@
name: dbt_alter_gha_tasks
run-name: dbt_alter_gha_tasks
on:
workflow_dispatch:
branches:
- "main"
inputs:
workflow_name:
type: string
description: Name of the workflow to perform the action on, no .yml extension
required: true
task_action:
type: choice
description: Action to perform
required: true
options:
- SUSPEND
- RESUME
default: SUSPEND
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_gha_tasks.yml@pre-release/v4-beta
with:
workflow_name: ${{ inputs.workflow_name }}
task_action: ${{ inputs.task_action }}
target: prod
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_deploy_new_workflows
run-name: dbt_deploy_new_workflows
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Deploy New Github Actions
command: |
make deploy_new_gha_tasks DBT_TARGET=prod
secrets: inherit
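The deploy_new_gha_tasks target this workflow invokes is defined in the Makefile later in this diff; a hedged local equivalent against a dev target would be:

  make deploy_new_gha_tasks DBT_TARGET=dev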

View File

@ -5,75 +5,10 @@ on:
branches:
- "main"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: refresh ddl for datashare
run: |
cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
- name: checkout docs branch
run: |
git checkout -B docs origin/main
- name: generate dbt docs
run: dbt docs generate -t prod
- name: move files to docs directory
run: |
mkdir -p ./docs
cp target/{catalog.json,manifest.json,index.html} docs/
- name: clean up target directory
run: dbt clean
- name: check for changes
run: git status
- name: stage changed files
run: git add .
- name: commit changed files
run: |
git config user.email "abc@xyz"
git config user.name "github-actions"
git commit -am "Auto-update docs"
- name: push changes to docs
run: |
git push -f --set-upstream origin docs
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_docs_update.yml@pre-release/v4-beta
secrets: inherit

View File

@ -3,38 +3,20 @@ run-name: ${{ github.event.inputs.branch }}
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
required: true
type: string
required: true
concurrency: ${{ github.workflow }}
jobs:
prepare_vars:
runs-on: ubuntu-latest
environment:
name: ${{ inputs.environment }}
outputs:
warehouse: ${{ steps.set_outputs.outputs.warehouse }}
steps:
- name: Set warehouse output
id: set_outputs
run: |
echo "warehouse=${{ vars.WAREHOUSE }}" >> $GITHUB_OUTPUT
called_workflow_template:
needs: prepare_vars
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_integration_test.yml@pre-release/v4-beta
with:
command: >
target: ${{ inputs.environment }}
command: |
dbt test --selector 'integration_tests'
environment: ${{ inputs.environment }}
warehouse: ${{ needs.prepare_vars.outputs.warehouse }}
secrets: inherit
notify-failure:
needs: [called_workflow_template]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -1,56 +0,0 @@
name: dbt_run_abi_refresh
run-name: dbt_run_abi_refresh
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Update ABI models
run: |
dbt run -m "polygon_models,tag:abis"
- name: Kick off decoded logs history, if there are new ABIs from users
run: |
dbt run-operation run_decoded_logs_history
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -1,12 +1,12 @@
name: dbt_run_adhoc
run-name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
target:
type: choice
description: DBT Run Environment
required: true
@ -26,49 +26,18 @@ on:
default: DBT
dbt_command:
type: string
description: "DBT Run Command"
description: 'DBT Run Command'
required: true
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
warehouse: ${{ inputs.warehouse }}
target: ${{ inputs.target }}
command_name: Run DBT Command
command: ${{ inputs.dbt_command }}
secrets: inherit
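Most workflows in this diff now delegate their steps to the reusable dbt_run_template.yml in the fsc-evm repository, which is not part of this change set. As a rough sketch only (input names inferred from the call sites; the actual template and its additional inputs such as command_name_2/command_2 may differ), a workflow_call template of roughly this shape would reproduce the inline steps that were removed here:

name: dbt_run_template (sketch)
on:
  workflow_call:
    inputs:
      command_name:
        type: string
        required: true
      command:
        type: string
        required: true
      target:
        type: string
        required: false
        default: prod
      warehouse:
        type: string
        required: false
        default: DBT
jobs:
  run_dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.target }}
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: ${{ inputs.command_name }}
        run: ${{ inputs.command }}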

View File

@ -1,53 +0,0 @@
name: dbt_run_alter_gha_task
run-name: dbt_run_alter_gha_task
on:
workflow_dispatch:
branches:
- "main"
inputs:
workflow_name:
type: string
description: Name of the workflow to perform the action on, no .yml extension
required: true
task_action:
type: choice
description: Action to perform
required: true
options:
- SUSPEND
- RESUME
default: SUSPEND
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
with:
workflow_name: |
${{ inputs.workflow_name }}
task_action: |
${{ inputs.task_action }}
environment: workflow_prod
secrets: inherit
notify-failure:
needs: [called_workflow_template]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -3,80 +3,13 @@ run-name: dbt_run_dev_refresh
on:
workflow_dispatch:
schedule:
# Runs "at 3:53 UTC every Monday" (see https://crontab.guru)
- cron: 53 3 * * 1
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs_refresh:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run-operation run_sp_create_prod_clone
notify-failure:
needs: [run_dbt_jobs_refresh]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
run_dbt_jobs_udfs:
runs-on: ubuntu-latest
needs: run_dbt_jobs_refresh
environment:
name: workflow_dev
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Recreate UDFs
run: |
dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
notify-failure2:
needs: [run_dbt_jobs_udfs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_dev_refresh.yml@pre-release/v4-beta
secrets: inherit

View File

@ -3,50 +3,21 @@ run-name: dbt_run_full_observability
on:
workflow_dispatch:
schedule:
# Runs “At 12:00 on day-of-month 1.” (see https://crontab.guru)
- cron: "0 12 1 * *"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod_2xl
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
warehouse: DBT_EMERGENCY
command_name: Run Observability Models
command: |
dbt run --threads 2 --vars '{"MAIN_OBSERV_FULL_TEST_ENABLED":True}' -m "fsc_evm,tag:observability"
secrets: inherit
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"OBSERV_FULL_TEST":True}' -m "polygon_models,tag:observability"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -3,50 +3,17 @@ run-name: dbt_run_heal_models
on:
workflow_dispatch:
schedule:
# Runs at 04:45 on Wednesday (see https://crontab.guru)
- cron: "45 4 * * 3"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "polygon_models,tag:heal" --vars '{"HEAL_MODEL":True}'
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
command_name: Run Heal Models
command: |
dbt run -m "$PROJECT_NAME,tag:heal" --vars '{"HEAL_MODEL":True}'
secrets: inherit

View File

@ -1,59 +0,0 @@
name: dbt_run_operation_reorg
run-name: dbt_run_operation_reorg
on:
workflow_dispatch:
schedule:
# Runs at minute 20 once per week on Monday (see https://crontab.guru)
- cron: "20 0 * * 1"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: List reorg models
id: list_models
run: |
reorg_model_list=$(dbt list --select "polygon_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT
- name: Execute block_reorg macro
run: |
dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '169'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -1,51 +0,0 @@
name: dbt_run_overflowed_traces
run-name: dbt_run_overflowed_traces
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "polygon_models,tag:overflowed_traces" --vars '{"OVERFLOWED_TRACES":True}'
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,22 @@
name: dbt_run_scheduled_abis
run-name: dbt_run_scheduled_abis
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run ABI Models
command: |
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:gold,tag:abis"
command_name_2: Kick off decoded logs history if there are new user-submitted ABIs
command_2: |
dbt run-operation fsc_evm.run_decoded_logs_history
secrets: inherit

View File

@ -6,46 +6,14 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "polygon_models,tag:curated"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
command_name: Run Curated Models
command: |
dbt run -m "$PROJECT_NAME,tag:curated" "fsc_evm,tag:curated"
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_run_scheduled_decoder
run-name: dbt_run_scheduled_decoder
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Decoder Models
command: |
dbt run -m "fsc_evm,tag:bronze,tag:decoded_logs" "fsc_evm,tag:silver,tag:decoded_logs" "fsc_evm,tag:gold,tag:decoded_logs"
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_run_scheduled_main
run-name: dbt_run_scheduled_main
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Main Models
command: |
dbt run -m "fsc_evm,tag:bronze,tag:core" "fsc_evm,tag:silver,tag:core" "fsc_evm,tag:gold,tag:core" "fsc_evm,tag:silver,tag:prices" "fsc_evm,tag:gold,tag:prices" "fsc_evm,tag:silver,tag:labels" "fsc_evm,tag:gold,tag:labels" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
secrets: inherit
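Each quoted selector in the command above is an intersection: a model must belong to the fsc_evm package and carry every listed tag, and the space-separated selectors are then unioned. To preview which models a given selector resolves to before dispatching the workflow, dbt's list command accepts the same selection syntax, for example:

  dbt ls --select "fsc_evm,tag:gold,tag:core" -t prod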

View File

@ -1,51 +0,0 @@
name: dbt_run_scheduled_non_realtime
run-name: dbt_run_scheduled_non_realtime
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "polygon_models,tag:non_realtime" "polygon_models,tag:streamline_decoded_logs_complete" "polygon_models,tag:streamline_decoded_logs_realtime"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,20 @@
name: dbt_run_scheduled_scores
run-name: dbt_run_scheduled_scores
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Scores Models
command: |
dbt run -m "fsc_evm,tag:scores"
secrets: inherit

View File

@ -6,50 +6,15 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "polygon_models,tag:streamline_core_complete" "polygon_models,tag:streamline_core_realtime" "polygon_models,tag:streamline_core_complete_receipts" "polygon_models,tag:streamline_core_realtime_receipts" "polygon_models,tag:streamline_core_complete_confirm_blocks" "polygon_models,tag:streamline_core_realtime_confirm_blocks"
- name: Run Chainhead Tests
run: |
dbt test -m "polygon_models,tag:chainhead"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
command_name: Run Chainhead Models
command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:chainhead" "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash"
dbt test -m "fsc_evm,tag:chainhead"
secrets: inherit

View File

@ -1,56 +0,0 @@
name: dbt_run_streamline_decoded_logs_history
run-name: dbt_run_streamline_decoded_logs_history
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Update complete table
run: |
dbt run -m "polygon_models,tag:streamline_decoded_logs_complete"
- name: Decode historical logs
run: |
dbt run-operation decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -1,51 +0,0 @@
name: dbt_run_streamline_decoder
run-name: dbt_run_streamline_decoder
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "polygon_models,tag:decoded_logs"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,22 @@
name: dbt_run_streamline_decoder_history
run-name: dbt_run_streamline_decoder_history
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Decoder Complete
command: |
dbt run -m "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete"
command_name_2: Run Streamline Decoder History
command_2: |
dbt run-operation fsc_evm.decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run History Models
command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:history" --exclude "fsc_evm,tag:receipts_by_hash"
secrets: inherit

View File

@ -1,76 +0,0 @@
name: dbt_run_streamline_history_adhoc
run-name: dbt_run_streamline_history_adhoc
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
type: choice
description: DBT Run Environment
required: true
options:
- dev
- prod
- prod_backfill
default: dev
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: choice
description: "DBT Run Command"
required: true
options:
- dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "polygon_models,tag:streamline_core_complete" "polygon_models,tag:streamline_core_history" "polygon_models,tag:streamline_core_complete_receipts" "polygon_models,tag:streamline_core_history_receipts" "polygon_models,tag:streamline_core_complete_confirm_blocks" "polygon_models,tag:streamline_core_history_confirm_blocks"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -3,50 +3,21 @@ run-name: dbt_test_daily
on:
workflow_dispatch:
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: "0 9 * * *"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test --exclude "polygon_models,tag:full_test" "polygon_models,tag:recent_test" "polygon_models,tag:gha_tasks" livequery_models
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
target: test
command_name: Build Daily Testing Views
command: |
dbt run -m "fsc_evm,tag:daily_test"
command_name_2: Run Daily Tests (all tests excluding full, recent and misc. others)
command_2: |
dbt test --exclude "fsc_evm,tag:full_test" "fsc_evm,tag:recent_test" "fsc_evm,tag:gha_tasks" livequery_models
secrets: inherit

View File

@ -6,47 +6,16 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "polygon_models,tag:observability"
dbt test -m "polygon_models,tag:recent_test"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
target: test
command_name: Run Observability & Recent Tests
command: |
dbt run -m "fsc_evm,tag:observability"
dbt test -m "fsc_evm,tag:recent_test"
secrets: inherit

View File

@ -3,50 +3,21 @@ run-name: dbt_test_monthly
on:
workflow_dispatch:
schedule:
# Runs “28th of month at 1PM” (see https://crontab.guru)
- cron: "0 13 28 * *"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod_2xl
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test -m "polygon_models,tag:full_test"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
warehouse: DBT_EMERGENCY
command_name: Build Full Testing Views
command: |
dbt run -m "fsc_evm,tag:full_test"
command_name_2: Run Full Tests
command_2: |
dbt test -m "fsc_evm,tag:full_test"
secrets: inherit

View File

@ -1,27 +0,0 @@
name: Slack Notification
on:
workflow_call:
secrets:
SLACK_WEBHOOK_URL:
required: true
jobs:
notify:
runs-on: ubuntu-latest
environment: workflow_prod
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies
run: pip install requests
- name: Send Slack notification
run: python python/slack_alert.py
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

.gitignore vendored
View File

@ -18,3 +18,5 @@ dbt-env/
.env
.user.yml
/package-lock.yml

Makefile Normal file
View File

@ -0,0 +1,109 @@
DBT_TARGET ?= dev
RECEIPTS_BY_HASH_ENABLED ?= false
cleanup_time:
@set -e; \
rm -f package-lock.yml && dbt clean && dbt deps
deploy_gha_workflows_table:
@set -e; \
echo "Collecting workflow names..." ; \
WORKFLOW_VALUES="" ; \
for file in $$(find .github/workflows -name "*.yml" -type f); do \
filename=$$(basename "$$file" .yml) ; \
if [ -z "$$WORKFLOW_VALUES" ]; then \
WORKFLOW_VALUES="('$$filename')" ; \
else \
WORKFLOW_VALUES="$$WORKFLOW_VALUES,('$$filename')" ; \
fi ; \
done ; \
echo "Found workflows: $$WORKFLOW_VALUES" ; \
dbt run-operation create_workflow_table --args "{\"workflow_values\": \"$$WORKFLOW_VALUES\"}" -t $(DBT_TARGET)
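# Illustration (hypothetical repo containing only two workflow files): the loop above builds
#   WORKFLOW_VALUES="('dbt_run_adhoc'),('dbt_test_daily')"
# so create_workflow_table effectively receives args {"workflow_values": "('dbt_run_adhoc'),('dbt_test_daily')"}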
deploy_gha_tasks:
@set -e; \
make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)
deploy_new_gha_tasks:
@set -e; \
make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)
deploy_livequery:
@set -e; \
dbt run-operation fsc_evm.drop_livequery_schemas --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
deploy_chain_phase_1:
@set -e; \
dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.call_sample_rpc_node -t $(DBT_TARGET); \
if [ "$(DBT_TARGET)" != "prod" ]; then \
if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
fi; \
else \
if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
fi; \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_2"
deploy_chain_phase_2:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_3"
deploy_chain_phase_3:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_2" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_2" -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_4"
deploy_chain_phase_4:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_3" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_3" -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
make deploy_gha_tasks DBT_TARGET=$(DBT_TARGET); \
fi; \
.PHONY: cleanup_time deploy_gha_workflows_table deploy_gha_tasks deploy_new_gha_tasks deploy_livequery deploy_chain_phase_1 deploy_chain_phase_2 deploy_chain_phase_3 deploy_chain_phase_4
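A hedged sketch of sequencing a full chain deployment from a local checkout, using the targets and defaults defined above (each phase prints a reminder to wait roughly ten minutes before starting the next):

  make cleanup_time
  make deploy_chain_phase_1 DBT_TARGET=dev RECEIPTS_BY_HASH_ENABLED=false
  make deploy_chain_phase_2 DBT_TARGET=dev
  make deploy_chain_phase_3 DBT_TARGET=dev
  make deploy_chain_phase_4 DBT_TARGET=dev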

View File

@ -1,8 +0,0 @@
workflow_name,workflow_schedule
dbt_run_scheduled_non_realtime,"5 * * * *"
dbt_run_streamline_chainhead,"25,55 * * * *"
dbt_run_streamline_decoder,"15 * * * *"
dbt_run_scheduled_curated,"20 */6 * * *"
dbt_test_intraday,"20 */4 * * *"
dbt_run_streamline_decoded_logs_history,"24 14 * * 6"
dbt_run_abi_refresh,"29 23 * * *"

View File

@ -0,0 +1,3 @@
block_number
31697306
31697810

View File

@ -17,6 +17,8 @@ test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
docs-paths:
["dbt_packages/fsc_evm/doc_descriptions", "models/doc_descriptions", "models"]
target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
@ -24,20 +26,12 @@ clean-targets: # directories to be removed by `dbt clean`
- "dbt_modules"
- "dbt_packages"
models:
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
tests:
+store_failures: true # all tests
on-run-start:
- "{{ create_sps() }}"
- "{{ create_udfs() }}"
- "{{ create_tasks() }}"
on-run-end:
- '{{ apply_meta_as_tags(results) }}'
@ -56,25 +50,74 @@ query-comment:
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models
models:
polygon_models: # replace with the name of the chain
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
livequery_models:
+materialized: ephemeral
fsc_evm:
+enabled: false # disable fsc_evm package by default
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
main_package:
+enabled: false # disable top level package by default, enable subpackages as needed
admin:
+enabled: true
core:
+enabled: true # enable subpackages, as needed
bronze:
+enabled: false
token_reads:
+enabled: true
github_actions:
+enabled: true
labels:
+enabled: true
observability:
+enabled: true
prices:
+enabled: true
utils:
+enabled: true
decoder_package:
+enabled: false
abis:
+enabled: true
decoded_logs:
+enabled: false
gold:
+enabled: true
silver:
+enabled: true
streamline:
+enabled: true
curated_package:
+enabled: false
stats:
+enabled: true
scores_package:
+enabled: false
# In this example config, we tell dbt to build all models in the example/ directory
# as tables. These settings can be overridden in the individual model files
# using the `{{ config(...) }}` macro.
vars:
STREAMLINE_INVOKE_STREAMS: False
"dbt_date:time_zone": GMT
STREAMLINE_INVOKE_STREAMS: False
UPDATE_UDFS_AND_SPS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
UPDATE_SNOWFLAKE_TAGS: True
WAIT: 0
OBSERV_FULL_TEST: False
OVERFLOWED_RECEIPTS: False
OVERFLOWED_TRACES: False
BALANCES_START: 0
BALANCES_END: 15000000
HEAL_MODEL: False
HEAL_MODELS: []
DROP_UDFS_AND_SPS: False
START_GHA_TASKS: False
STUDIO_TEST_USER_ID: '{{ env_var("STUDIO_TEST_USER_ID", "98d15c30-9fa5-43cd-9c69-3d4c0bb269f5") }}'
#### STREAMLINE 2.0 BEGIN ####
@ -101,44 +144,3 @@ vars:
- DBT_CLOUD_POLYGON
#### STREAMLINE 2.0 END ####
#### FSC_EVM BEGIN ####
# Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables
### GLOBAL VARIABLES BEGIN ###
## REQUIRED
GLOBAL_PROD_DB_NAME: 'polygon'
GLOBAL_NODE_SECRET_PATH: 'Vault/prod/polygon/quicknode/mainnet'
GLOBAL_BLOCKS_PER_HOUR: 1700
GLOBAL_USES_STREAMLINE_V1: True
GLOBAL_USES_SINGLE_FLIGHT_METHOD: True
GLOBAL_CHAINHEAD_DELAY: 10
### GLOBAL VARIABLES END ###
### MAIN_PACKAGE VARIABLES BEGIN ###
### CORE ###
## REQUIRED
## OPTIONAL
# GOLD_FULL_REFRESH: True
# SILVER_FULL_REFRESH: True
# BLOCKS_COMPLETE_FULL_REFRESH: True
# CONFIRM_BLOCKS_COMPLETE_FULL_REFRESH: True
# TRACES_COMPLETE_FULL_REFRESH: True
# RECEIPTS_COMPLETE_FULL_REFRESH: True
# TRANSACTIONS_COMPLETE_FULL_REFRESH: True
### MAIN_PACKAGE VARIABLES END ###
### DECODER_PACKAGE VARIABLES BEGIN ###
## REQUIRED
## OPTIONAL
### DECODER_PACKAGE VARIABLES END ###
#### FSC_EVM END ####
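The +enabled flags above gate the fsc_evm subpackages per chain, with child settings overriding the parent package default. A hedged example of switching on the decoder's decoded_logs models for this project would be an override of the same shape:

models:
  fsc_evm:
    decoder_package:
      +enabled: true
      decoded_logs:
        +enabled: true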

View File

@ -1,7 +0,0 @@
{% macro create_tasks() %}
{% if var("UPDATE_UDFS_AND_SPS") %}
{% if target.database == 'POLYGON' %}
{# {{ task_get_abis() }}; #}
{% endif %}
{% endif %}
{% endmacro %}

View File

@ -1,23 +1,9 @@
{% macro create_udfs() %}
{% if var("UPDATE_UDFS_AND_SPS") %}
{% if var("UPDATE_UDFS_AND_SPS", false) %}
{% set sql %}
CREATE schema if NOT EXISTS silver;
{{ create_udtf_get_base_table(
schema = "streamline"
) }}
{% endset %}
{% do run_query(sql) %}
{% if target.database != "POLYGON_COMMUNITY_DEV" %}
{% set sql %}
{{ create_udf_get_chainhead() }}
{{ create_udf_bulk_json_rpc() }}
{{ create_udf_decode_array_string() }}
{{ create_udf_decode_array_object() }}
{{ create_udf_bulk_decode_logs() }}
{% endset %}
{% do run_query(sql) %}
{% endif %}
{{- fsc_utils.create_udfs() -}}
{% endif %}
{% endmacro %}

View File

@ -1,22 +0,0 @@
{% macro standard_predicate(
input_column = 'block_number'
) -%}
{%- set database_name = target.database -%}
{%- set schema_name = generate_schema_name(
node = model
) -%}
{%- set table_name = generate_alias_name(
node = model
) -%}
{%- set tmp_table_name = table_name ~ '__dbt_tmp' -%}
{%- set full_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ table_name -%}
{%- set full_tmp_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ tmp_table_name -%}
{{ full_table_name }}.{{ input_column }} >= (
SELECT
MIN(
{{ input_column }}
)
FROM
{{ full_tmp_table_name }}
)
{%- endmacro %}

View File

@ -1,124 +0,0 @@
{% macro decoded_logs_history(backfill_mode=false) %}
{%- set params = {
"sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 8000000),
"producer_batch_size": var("DECODED_LOGS_HISTORY_PRODUCER_BATCH_SIZE", 400000),
"worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000)
} -%}
{% set wait_time = var("DECODED_LOGS_HISTORY_WAIT_TIME", 60) %}
{% set find_months_query %}
SELECT
DISTINCT date_trunc('month', block_timestamp)::date as month
FROM {{ ref('core__fact_blocks') }}
ORDER BY month ASC
{% endset %}
{% set results = run_query(find_months_query) %}
{% if execute %}
{% set months = results.columns[0].values() %}
{% for month in months %}
{% set view_name = 'decoded_logs_history_' ~ month.strftime('%Y_%m') %}
{% set create_view_query %}
create or replace view streamline.{{view_name}} as (
WITH target_blocks AS (
SELECT
block_number
FROM {{ ref('core__fact_blocks') }}
WHERE date_trunc('month', block_timestamp) = '{{month}}'::timestamp
),
new_abis AS (
SELECT
abi,
parent_contract_address,
event_signature,
start_block,
end_block
FROM {{ ref('silver__complete_event_abis') }}
{% if not backfill_mode %}
WHERE inserted_timestamp > dateadd('day', -30, sysdate())
{% endif %}
),
existing_logs_to_exclude AS (
SELECT _log_id
FROM {{ ref('streamline__decoded_logs_complete') }} l
INNER JOIN target_blocks b using (block_number)
),
candidate_logs AS (
SELECT
l.block_number,
l.tx_hash,
l.event_index,
l.contract_address,
l.topics,
l.data,
concat(l.tx_hash::string, '-', l.event_index::string) as _log_id
FROM target_blocks b
INNER JOIN {{ ref('core__fact_event_logs') }} l using (block_number)
WHERE l.tx_status = 'SUCCESS' and date_trunc('month', l.block_timestamp) = '{{month}}'::timestamp
)
SELECT
l.block_number,
l._log_id,
A.abi,
OBJECT_CONSTRUCT(
'topics', l.topics,
'data', l.data,
'address', l.contract_address
) AS data
FROM candidate_logs l
INNER JOIN new_abis A
ON A.parent_contract_address = l.contract_address
AND A.event_signature = l.topics[0]::STRING
AND l.block_number BETWEEN A.start_block AND A.end_block
WHERE NOT EXISTS (
SELECT 1
FROM existing_logs_to_exclude e
WHERE e._log_id = l._log_id
)
LIMIT {{ params.sql_limit }}
)
{% endset %}
{# Create the view #}
{% do run_query(create_view_query) %}
{{ log("Created view for month " ~ month.strftime('%Y-%m'), info=True) }}
{% if var("STREAMLINE_INVOKE_STREAMS", false) %}
{# Check if rows exist first #}
{% set check_rows_query %}
SELECT EXISTS(SELECT 1 FROM streamline.{{view_name}} LIMIT 1)
{% endset %}
{% set results = run_query(check_rows_query) %}
{% set has_rows = results.columns[0].values()[0] %}
{% if has_rows %}
{# Invoke streamline, if rows exist to decode #}
{% set decode_query %}
SELECT
streamline.udf_bulk_decode_logs_v2(
PARSE_JSON(
$${ "external_table": "decoded_logs",
"producer_batch_size": {{ params.producer_batch_size }},
"sql_limit": {{ params.sql_limit }},
"sql_source": "{{view_name}}",
"worker_batch_size": {{ params.worker_batch_size }} }$$
)
);
{% endset %}
{% do run_query(decode_query) %}
{{ log("Triggered decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{# Call wait since we actually did some decoding #}
{% do run_query("call system$wait(" ~ wait_time ~ ")") %}
{{ log("Completed wait after decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{% else %}
{{ log("No rows to decode for month " ~ month.strftime('%Y-%m'), info=True) }}
{% endif %}
{% endif %}
{% endfor %}
{% endif %}
{% endmacro %}

View File

@ -1,29 +0,0 @@
{% macro run_decoded_logs_history() %}
{% set blockchain = var('GLOBAL_PROD_DB_NAME','').lower() %}
{% set check_for_new_user_abis_query %}
select 1
from {{ ref('silver__user_verified_abis') }}
where _inserted_timestamp::date = sysdate()::date
and dayname(sysdate()) <> 'Sat'
{% endset %}
{% set results = run_query(check_for_new_user_abis_query) %}
{% if execute %}
{% set new_user_abis = results.columns[0].values()[0] %}
{% if new_user_abis %}
{% set invoke_workflow_query %}
SELECT
github_actions.workflow_dispatches(
'FlipsideCrypto',
'{{ blockchain }}' || '-models',
'dbt_run_streamline_decoded_logs_history.yml',
NULL
)
{% endset %}
{% do run_query(invoke_workflow_query) %}
{% endif %}
{% endif %}
{% endmacro %}

View File

@ -1,226 +0,0 @@
{% macro silver_traces_v1(
full_reload_start_block,
full_reload_blocks,
full_reload_mode = false,
TRACES_ARB_MODE = false,
TRACES_SEI_MODE = false,
TRACES_KAIA_MODE = false,
use_partition_key = false,
schema_name = 'bronze'
) %}
WITH bronze_traces AS (
SELECT
block_number,
{% if use_partition_key %}
partition_key,
{% else %}
_partition_by_block_id AS partition_key,
{% endif %}
VALUE :array_index :: INT AS tx_position,
DATA :result AS full_traces,
{% if TRACES_SEI_MODE %}
DATA :txHash :: STRING AS tx_hash,
{% endif %}
_inserted_timestamp
FROM
{% if is_incremental() and not full_reload_mode %}
{{ ref(
schema_name ~ '__traces'
) }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
AND DATA :result IS NOT NULL {% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% elif is_incremental() and full_reload_mode %}
{{ ref(
schema_name ~ '__traces_fr'
) }}
WHERE
{% if use_partition_key %}
partition_key BETWEEN (
SELECT
MAX(partition_key) - 100000
FROM
{{ this }}
)
AND (
SELECT
MAX(partition_key) + {{ full_reload_blocks }}
FROM
{{ this }}
)
{% else %}
_partition_by_block_id BETWEEN (
SELECT
MAX(_partition_by_block_id) - 100000
FROM
{{ this }}
)
AND (
SELECT
MAX(_partition_by_block_id) + {{ full_reload_blocks }}
FROM
{{ this }}
)
{% endif %}
{% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% else %}
{{ ref(
schema_name ~ '__traces_fr'
) }}
WHERE
{% if use_partition_key %}
partition_key <= {{ full_reload_start_block }}
{% else %}
_partition_by_block_id <= {{ full_reload_start_block }}
{% endif %}
{% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number, tx_position
ORDER BY
_inserted_timestamp DESC)) = 1
),
flatten_traces AS (
SELECT
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
partition_key,
IFF(
path IN (
'result',
'result.value',
'result.type',
'result.to',
'result.input',
'result.gasUsed',
'result.gas',
'result.from',
'result.output',
'result.error',
'result.revertReason',
'result.time',
'gasUsed',
'gas',
'type',
'to',
'from',
'value',
'input',
'error',
'output',
'time',
'revertReason'
{% if TRACES_ARB_MODE %},
'afterEVMTransfers',
'beforeEVMTransfers',
'result.afterEVMTransfers',
'result.beforeEVMTransfers'
{% endif %}
{% if TRACES_KAIA_MODE %},
'reverted',
'result.reverted'
{% endif %}
),
'ORIGIN',
REGEXP_REPLACE(REGEXP_REPLACE(path, '[^0-9]+', '_'), '^_|_$', '')
) AS trace_address,
_inserted_timestamp,
OBJECT_AGG(
key,
VALUE
) AS trace_json,
CASE
WHEN trace_address = 'ORIGIN' THEN NULL
WHEN POSITION(
'_' IN trace_address
) = 0 THEN 'ORIGIN'
ELSE REGEXP_REPLACE(
trace_address,
'_[0-9]+$',
'',
1,
1
)
END AS parent_trace_address,
SPLIT(
trace_address,
'_'
) AS trace_address_array
FROM
bronze_traces txs,
TABLE(
FLATTEN(
input => PARSE_JSON(
txs.full_traces
),
recursive => TRUE
)
) f
WHERE
f.index IS NULL
AND f.key != 'calls'
AND f.path != 'result'
{% if TRACES_ARB_MODE %}
AND f.path NOT LIKE 'afterEVMTransfers[%'
AND f.path NOT LIKE 'beforeEVMTransfers[%'
{% endif %}
{% if TRACES_KAIA_MODE %}
and f.key not in ('message', 'contract')
{% endif %}
GROUP BY
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
partition_key,
trace_address,
_inserted_timestamp
)
SELECT
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
trace_address,
parent_trace_address,
trace_address_array,
trace_json,
partition_key,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['block_number'] +
(['tx_hash'] if TRACES_SEI_MODE else ['tx_position']) +
['trace_address']
) }} AS traces_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
flatten_traces qualify(ROW_NUMBER() over(PARTITION BY traces_id
ORDER BY
_inserted_timestamp DESC)) = 1
{% endmacro %}

View File

@ -1,101 +0,0 @@
{% macro streamline_external_table_query_decoder(
source_name,
source_version
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP())
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_decoder_fr(
source_name,
source_version
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}

View File

@ -1,141 +0,0 @@
{% macro streamline_external_table_query(
source_name,
source_version,
partition_function,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_fr(
source_name,
source_version,
partition_function,
partition_join_key,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.{{ partition_join_key }}
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.{{ partition_join_key }}
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}

View File

@ -1,36 +0,0 @@
{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
{% if model_type != '' %}
{% set model_type = '_' ~ model_type %}
{% endif %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }}
{{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }}
{% if uses_receipts_by_hash %}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{{ log("", info=True) }}
{{ log("=== Source Details ===", info=True) }}
{{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }}
{{ log("", info=True) }}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@ -1,29 +0,0 @@
{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %}
{%- if flags.WHICH == 'compile' and execute -%}
{% if uses_receipts_by_hash %}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' unique_key = "' ~ config.get('unique_key') ~ '",\n' %}
{% set config_log = config_log ~ ' cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %}
{% set config_log = config_log ~ ' merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' post_hook = "' ~ post_hook ~ '",\n' %}
{% set config_log = config_log ~ ' incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' full_refresh = ' ~ full_refresh_type ~ ',\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@ -1,36 +0,0 @@
{% macro log_model_details(vars=false, params=false) %}
{%- if execute -%}
/*
DBT Model Config:
{{ model.config | tojson(indent=2) }}
*/
{% if vars is not false %}
{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( vars | tojson(indent=2), info=True) }}
{% endif %}
/*
Variables:
{{ vars | tojson(indent=2) }}
*/
{% endif %}
{% if params is not false %}
{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( params | tojson(indent=2), info=True) }}
{% endif %}
/*
Parameters:
{{ params | tojson(indent=2) }}
*/
{% endif %}
/*
Raw Code:
{{ model.raw_code }}
*/
{%- endif -%}
{% endmacro %}

View File

@ -1,55 +0,0 @@
{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("START_UP_BLOCK: " ~ min_block, info=True) }}
{{ log("", info=True) }}
{{ log("=== API Details ===", info=True) }}
{{ log("NODE_URL: " ~ node_url, info=True) }}
{{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
{{ log("", info=True) }}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }}
{{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }}
{{ log("", info=True) }}
{{ log("=== RPC Details ===", info=True) }}
{{ log(model_name ~ ": {", info=True) }}
{{ log(" method: '" ~ method ~ "',", info=True) }}
{{ log(" method_params: " ~ method_params, info=True) }}
{{ log("}", info=True) }}
{{ log("", info=True) }}
{% set params_str = streamline_params | tojson %}
{% set params_formatted = params_str | replace('{', '{\n ') | replace('}', '\n }') | replace(', ', ',\n ') %}
{# Clean up the method_params formatting #}
{% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' post_hook = fsc_utils.if_data_call_function_v2(\n' %}
{% set config_log = config_log ~ ' func = "streamline.udf_bulk_rest_api_v2",\n' %}
{% set config_log = config_log ~ ' target = "' ~ this.schema ~ '.' ~ this.identifier ~ '",\n' %}
{% set config_log = config_log ~ ' params = ' ~ params_formatted ~ '\n' %}
{% set config_log = config_log ~ ' ),\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@ -1,47 +0,0 @@
{% macro set_default_variables_streamline(model_name, model_type) %}
{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%}
{%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%}
{%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%}
{%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%}
{%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime'
else 'ORDER BY partition_key ASC, block_number ASC' -%}
{%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'node_url': node_url,
'node_secret_path': node_secret_path,
'model_quantum_state': model_quantum_state,
'testing_limit': testing_limit,
'new_build': new_build,
'order_by_clause': order_by_clause,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}
{% macro set_default_variables_bronze(source_name, model_type) %}
{%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION',
"CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)")
-%}
{%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%}
{%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%}
{%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'partition_function': partition_function,
'partition_join_key': partition_join_key,
'block_number': block_number,
'balances': balances,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}
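A bronze model would typically feed these defaults straight into the external table query macros above. The sketch below is illustrative only; the source name and settings are hypothetical, not taken from this repo.
{# Illustrative sketch only: a hypothetical bronze model wiring the defaults into the FR query macro. #}
{% set bronze_vars = set_default_variables_bronze('RECEIPTS', '') %}

{{ streamline_external_table_query_fr(
    source_name = 'receipts',
    source_version = '',
    partition_function = bronze_vars['partition_function'],
    partition_join_key = bronze_vars['partition_join_key'],
    balances = bronze_vars['balances'],
    block_number = bronze_vars['block_number'],
    uses_receipts_by_hash = bronze_vars['uses_receipts_by_hash']
) }}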

View File

@ -1,57 +0,0 @@
{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %}
{%- set rpc_config_details = {
"blocks_transactions": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)',
"exploded_key": ['result', 'result.transactions']
},
"receipts_by_hash": {
"method": 'eth_getTransactionReceipt',
"method_params": 'ARRAY_CONSTRUCT(tx_hash)'
},
"receipts": {
"method": 'eth_getBlockReceipts',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))',
"exploded_key": ['result'],
"lambdas": 2
},
"traces": {
"method": 'debug_traceBlockByNumber',
"method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))",
"exploded_key": ['result'],
"lambdas": 2
},
"confirm_blocks": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)'
}
} -%}
{%- set rpc_config = rpc_config_details[model_name.lower()] -%}
{%- set params = {
"external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()),
"sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"worker_batch_size": var(
(model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(),
(2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1))
),
"sql_source": (model_name ~ '_' ~ model_type).lower(),
"method": rpc_config['method'],
"method_params": rpc_config['method_params']
} -%}
{%- if rpc_config.get('exploded_key') is not none -%}
{%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%}
{%- endif -%}
{%- if rpc_config.get('lambdas') is not none -%}
{%- do params.update({"lambdas": rpc_config['lambdas']}) -%}
{%- endif -%}
{{ return(params) }}
{% endmacro %}
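As a rough illustration of how these parameters get resolved (the model name and type below are hypothetical, not a claim about this repo's models):
{# Illustrative sketch only: resolve RPC and batching params for a hypothetical realtime traces model. #}
{% set streamline_params = set_streamline_parameters(
    model_name = 'traces',
    model_type = 'realtime'
) %}
{# streamline_params now carries method, method_params, sql_limit, producer/worker batch sizes, etc.,
   ready to be passed through to the bulk request post-hook. #}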

View File

@ -1,12 +0,0 @@
{% macro lookback() %}
{% if execute and is_incremental() %}
{% set query %}
SELECT
MAX(_inserted_timestamp) :: DATE - 1
FROM
{{ this }};
{% endset %}
{% set last_week = run_query(query).columns [0] [0] %}
{% do return(last_week) %}
{% endif %}
{% endmacro %}
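A minimal sketch of how an incremental model might consume this macro, assuming an _inserted_timestamp column on the target table:
{# Illustrative sketch only: limit incremental scans to rows loaded since the last full day. #}
{% if is_incremental() %}
WHERE
    _inserted_timestamp :: DATE >= '{{ lookback() }}'
{% endif %}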

View File

@ -1,21 +0,0 @@
{% macro create_aws_polygon_api() %}
{{ log(
"Creating integration for target:" ~ target
) }}
{% if target.name == "prod" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_polygon_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/polygon-api-prod-rolesnowflakeudfsAF733095-9hTxS6yYCWlj' api_allowed_prefixes = (
'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% elif target.name == "dev" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_polygon_api_dev api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/polygon-api-dev-rolesnowflakeudfsAF733095-10H2D361D3DJD' api_allowed_prefixes = (
'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}
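This macro is normally executed ad hoc rather than inside a model; assuming standard dbt tooling, it would be invoked with something like `dbt run-operation create_aws_polygon_api --target prod` (the command is shown as an assumption for illustration, not taken from this repo's automation).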

View File

@ -1,79 +0,0 @@
{% macro create_udf_get_chainhead() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead() returns variant api_integration =
{% if target.name == "prod" %}
aws_polygon_api AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
{% else %}
aws_polygon_api_dev AS 'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
{%- endif %};
{% endmacro %}
{% macro create_udf_bulk_json_rpc() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(
json variant
) returns text api_integration = {% if target.name == "prod" %}
aws_polygon_api AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_json_rpc'
{% else %}
aws_polygon_api_dev AS 'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_json_rpc'
{%- endif %};
{% endmacro %}
{% macro create_udf_decode_array_string() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
abi ARRAY,
DATA STRING
) returns ARRAY api_integration = {% if target.name == "prod" %}
aws_polygon_api AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/decode_function'
{% else %}
aws_polygon_api_dev AS 'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/decode_function'
{%- endif %};
{% endmacro %}
{% macro create_udf_decode_array_object() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
abi ARRAY,
DATA OBJECT
) returns ARRAY api_integration = {% if target.name == "prod" %}
aws_polygon_api AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/decode_log'
{% else %}
aws_polygon_api_dev AS 'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/decode_log'
{%- endif %};
{% endmacro %}
{% macro create_udf_bulk_decode_logs() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_decode_logs(
json OBJECT
) returns ARRAY api_integration = {% if target.name == "prod" %}
aws_polygon_api AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/bulk_decode_logs'
{% else %}
aws_polygon_api_dev AS 'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/bulk_decode_logs'
{%- endif %};
{% endmacro %}
{% macro create_udf_introspect(
drop_ = False
) %}
{% set name_ = 'silver.udf_introspect' %}
{% set signature = [('json', 'variant')] %}
{% set return_type = 'text' %}
{% set sql_ = construct_api_route("introspect") %}
{% if not drop_ %}
{{ create_sql_function(
name_ = name_,
signature = signature,
return_type = return_type,
sql_ = sql_,
api_integration = var("API_INTEGRATION")
) }}
{% else %}
{{ drop_function(
name_,
signature = signature,
) }}
{% endif %}
{% endmacro %}

View File

@ -1,22 +0,0 @@
{% test missing_decoded_logs(model) %}
SELECT
l.block_number,
CONCAT(l.tx_hash :: STRING, '-', l.event_index :: STRING) AS _log_id
FROM
{{ ref('core__fact_event_logs') }}
l
LEFT JOIN {{ model }}
d
ON l.block_number = d.block_number
AND CONCAT(
l.tx_hash,
'-',
l.event_index
) = d._log_id
WHERE
l.contract_address = LOWER('0x0d500B1d8E8eF31E21C99d1Db9A6444d3ADf1270') -- WMATIC
AND l.topics [0] :: STRING = '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' -- Transfer
AND l.block_timestamp BETWEEN DATEADD('hour', -48, SYSDATE())
AND DATEADD('hour', -6, SYSDATE())
AND d._log_id IS NULL
{% endtest %}

View File

@ -1,193 +0,0 @@
{% macro missing_receipts(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_full') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}
{% macro recent_missing_receipts(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}
{% macro missing_traces(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_full') }}
WHERE
from_address <> '0x0000000000000000000000000000000000000000'
AND to_address <> '0x0000000000000000000000000000000000000000'
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
(
model_tx_hash IS NULL
OR model_block_number IS NULL
)
AND base_block_number NOT IN (
SELECT
block_number
FROM
{{ ref('silver_observability__excluded_trace_blocks') }}
)
{% endmacro %}
{% macro missing_confirmed_txs(
model1,
model2
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
block_hash AS base_block_hash,
tx_hash AS base_tx_hash
FROM
{{ model1 }}
),
model_name AS (
SELECT
block_number AS model_block_number,
block_hash AS model_block_hash,
tx_hash AS model_tx_hash
FROM
{{ model2 }}
)
SELECT
DISTINCT base_block_number AS block_number
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
AND base_block_hash = model_block_hash
WHERE
model_tx_hash IS NULL
AND model_block_number <= (
SELECT
MAX(base_block_number)
FROM
txs_base
)
{% endmacro %}
{% macro recent_missing_traces(
model,
threshold
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
WHERE
from_address <> '0x0000000000000000000000000000000000000000'
AND to_address <> '0x0000000000000000000000000000000000000000'
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
),
FINAL AS (
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
)
SELECT
*
FROM
FINAL
WHERE
(
SELECT
COUNT(*) >= {{ threshold }}
FROM
FINAL
)
{% endmacro %}

View File

@ -1,78 +0,0 @@
{% macro if_data_call_function(
func,
target
) %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: Calling udf " ~ func ~ " on " ~ target,
True
) }}
{% endif %}
SELECT
{{ func }}
WHERE
EXISTS(
SELECT
1
FROM
{{ target }}
LIMIT
1
)
{% else %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: NOOP",
False
) }}
{% endif %}
SELECT
NULL
{% endif %}
{% endmacro %}
{% macro if_data_call_wait() %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% set query %}
SELECT
1
WHERE
EXISTS(
SELECT
1
FROM
{{ model.schema ~ "." ~ model.alias }}
LIMIT
1
) {% endset %}
{% if execute %}
{% set results = run_query(
query
) %}
{% if results %}
{{ log(
"Waiting...",
info = True
) }}
{% set wait_query %}
SELECT
system$wait(
{{ var(
"WAIT",
600
) }}
) {% endset %}
{% do run_query(wait_query) %}
{% else %}
SELECT
NULL;
{% endif %}
{% endif %}
{% endif %}
{% endmacro %}
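These helpers are typically attached to streamline request models as hooks. The config below is a hedged sketch: the UDF name, payload shape, and hook wiring are assumptions for illustration, not this repo's actual model config.
{# Illustrative sketch only: hypothetical hook wiring for a streamline request model. #}
{{ config(
    materialized = 'view',
    pre_hook = "{{ if_data_call_wait() }}",
    post_hook = if_data_call_function(
        func = "streamline.udf_bulk_json_rpc(object_construct('sql_source', '{{ this.identifier }}'))",
        target = "{{ this.schema }}.{{ this.identifier }}"
    )
) }}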

View File

@ -16,36 +16,34 @@ There is more information on how to use dbt docs in the last section of this doc
**Click on the links below to jump to the documentation for each schema.**
### Core Tables (polygon.core)
### Core Tables
**Dimension Tables:**
- [dim_contract_abis](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__dim_contract_abis)
- [dim_contracts](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__dim_contracts)
- [dim_labels](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__dim_labels)
**Dimensional Tables**
- [dim_contracts](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.core__dim_contracts)
- [dim_contract_abis](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.core__dim_contract_abis)
- [dim_labels](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.core__dim_labels)
**Fact Tables:**
- [fact_blocks](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__fact_blocks)
- [fact_decoded_event_logs](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__fact_decoded_event_logs)
- [fact_event_logs](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__fact_event_logs)
- [fact_token_transfers](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__fact_token_transfers)
- [fact_traces](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__fact_traces)
- [fact_transactions](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__fact_transactions)
- [fact_blocks](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.core__fact_blocks)
- [fact_transactions](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.core__fact_transactions)
- [fact_event_logs](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.core__fact_event_logs)
- [fact_traces](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.core__fact_traces)
**Convenience Tables:**
- [ez_decoded_event_logs](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__ez_decoded_event_logs)
- [ez_native_transfers](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__ez_native_transfers)
- [ez_token_transfers](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.core__ez_token_transfers)
- [ez_decoded_event_logs](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.core__ez_decoded_event_logs)
- [ez_native_transfers](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.core__ez_native_transfers)
- [ez_token_transfers](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.core__ez_token_transfers)
### Price Tables (polygon.price)
- [dim_asset_metadata](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.price__ez_prices_hourly)
- [dim_asset_metadata](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.price__ez_prices_hourly)
### DeFi Tables (polygon.defi)
- [dim_dex_liquidity_pools](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__dim_dex_liquidity_pools)
- [ez_dex_swaps](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__ez_dex_swaps)
- [ez_bridge_activity](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__ez_bridge_activity)
- [ez_dex_swaps](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__ez_dex_swaps)
- [dim_dex_liquidity_pools](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__dim_dex_liquidity_pools)
- [ez_lending_borrows](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__ez_lending_borrows)
- [ez_lending_deposits](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__ez_lending_deposits)
- [ez_lending_flashloans](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__ez_lending_flashloans)
@ -53,13 +51,14 @@ There is more information on how to use dbt docs in the last section of this doc
- [ez_lending_repayments](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__ez_lending_repayments)
- [ez_lending_withdraws](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__ez_lending_withdraws)
### Flipside Partner Tables (polygon.partner_name)
### NFT Tables (polygon.nft)
- [ez_nft_mints](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.nft__ez_nft_mints)
- [ez_nft_transfers](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.nft__ez_nft_transfers)
- [ez_nft_sales](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.nft__ez_nft_sales)
- [ez_nft_transfers](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.nft__ez_nft_transfers)
### Stats Tables (polygon.stats)
- [ez_core_metrics_hourly](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.stats__ez_core_metrics_hourly)
- [ez_core_metrics_hourly](https://flipsidecrypto.github.io/polygon-models/#!/model/model.fsc_evm.stats__ez_core_metrics_hourly)
## **Helpful User-Defined Functions (UDFs)**
@ -79,18 +78,7 @@ The dimension tables are sourced from a variety of on-chain and off-chain source
Convenience views (denoted ez_) are a combination of different fact and dimension tables. These views are built to make it easier to query the data.
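For example, a single query against an ez_ view can return transfers with token metadata and USD values already joined in. The column list below reflects the typical ez_token_transfers layout and should be confirmed against the table documentation:

```sql
SELECT
    block_timestamp,
    tx_hash,
    from_address,
    to_address,
    symbol,
    amount,
    amount_usd
FROM polygon.core.ez_token_transfers
WHERE block_timestamp >= DATEADD('day', -1, SYSDATE())
LIMIT 100;
```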
## **Contract Decoding**
### Adding a contract for decoding
To add a contract for decoding, please visit [here](https://science.flipsidecrypto.xyz/abi-requestor/).
Assuming the submitted ABI is valid, records will be decoded within 24 hours. If records are not decoded within 24 hours, or for any ABI updates, please submit a ticket within our Discord.
### General Process Overview
The majority of our ABIs have been sourced from Etherscan, and we are constantly asking Etherscan for new ABIs. However, this is not comprehensive, and therefore we must also rely on our users to submit ABIs for decoding.
If we are unable to locate an ABI for the contract from either Etherscan or our users, we will attempt to match the contract to a similar ABI. This is done by comparing the contract bytecode to a list of known contract bytecodes. If we are able to match the contract to a similar ABI, we will decode the contract using the similar ABI. You can see the source of each ABI in the `dim_contract_abis` table within the `abi_source` column.
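For instance, a quick way to see how the ABIs in use were sourced (the fully qualified table name may vary by deployment):

```sql
SELECT
    abi_source,
    COUNT(*) AS contract_count
FROM polygon.core.dim_contract_abis
GROUP BY abi_source
ORDER BY contract_count DESC;
```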
Once ABIs have been verified, events within the last day of blocks will be decoded within approximately 90 minutes. Events older than 1 day will be decoded within 24 hours in the majority of cases. The exception here is if the contract has a massive number of events, in which case it may take longer.
NOTE: Polygon is currently operating in its Testnet phase. Flipside will provide Mainnet data tables once Polygon Mainnet is deployed.
## **Using dbt docs**
### Navigation
@ -99,7 +87,7 @@ You can use the ```Project``` and ```Database``` navigation tabs on the left sid
### Database Tab
This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the database.
This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the database.
### Graph Exploration
@ -113,10 +101,11 @@ Note that you can also right-click on models to interactively filter and explore
### **More information**
- [Flipside](https://flipsidecrypto.xyz)
- [Flipside](https://flipsidecrypto.xyz/)
- [Velocity](https://app.flipsidecrypto.com/velocity?nav=Discover)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/polygon-models)
- [Data Studio](https://flipsidecrypto.xyz/edit)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)
{% enddocs %}

View File

@ -1,87 +0,0 @@
{{ config (
materialized = "ephemeral"
) }}
WITH retry AS (
SELECT
contract_address,
GREATEST(
latest_call_block,
latest_event_block
) AS block_number,
total_interaction_count
FROM
{{ ref("silver__relevant_contracts") }}
r
LEFT JOIN {{ source(
'polygon_silver',
'verified_abis'
) }}
v USING (contract_address)
WHERE
r.total_interaction_count >= 250 -- high interaction count
AND GREATEST(
max_inserted_timestamp_logs,
max_inserted_timestamp_traces
) >= CURRENT_DATE - INTERVAL '30 days' -- recent activity
AND v.contract_address IS NULL -- no verified abi
AND r.contract_address NOT IN (
SELECT
contract_address
FROM
{{ source(
'polygon_bronze_api',
'contract_abis'
) }}
WHERE
_inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
AND abi_data :data :result :: STRING <> 'Max rate limit reached'
)
ORDER BY
total_interaction_count DESC
LIMIT
25
), FINAL AS (
SELECT
proxy_address AS contract_address,
start_block AS block_number
FROM
{{ ref("silver__proxies") }}
p
JOIN retry r USING (contract_address)
LEFT JOIN {{ source(
'polygon_silver',
'verified_abis'
) }}
v
ON v.contract_address = p.proxy_address
WHERE
v.contract_address IS NULL
AND p.contract_address NOT IN (
SELECT
contract_address
FROM
{{ source(
'polygon_bronze_api',
'contract_abis'
) }}
WHERE
_inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
AND abi_data :data :result :: STRING <> 'Max rate limit reached'
)
UNION ALL
SELECT
contract_address,
block_number
FROM
retry
)
SELECT
*
FROM
FINAL qualify ROW_NUMBER() over (
PARTITION BY contract_address
ORDER BY
block_number DESC
) = 1

View File

@ -1,79 +0,0 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false,
tags = ['curated']
) }}
WITH base AS (
SELECT
contract_address
FROM
{{ ref('silver__relevant_contracts') }}
WHERE
total_interaction_count >= 100
{% if is_incremental() %}
AND contract_address NOT IN (
SELECT
contract_address
FROM
{{ this }}
WHERE
abi_data :data :result :: STRING <> 'Max rate limit reached'
)
{% endif %}
ORDER BY total_interaction_count DESC
LIMIT
100
), all_contracts AS (
SELECT
contract_address
FROM
base
UNION
SELECT
contract_address
FROM
{{ ref('_retry_abis') }}
),
row_nos AS (
SELECT
contract_address,
ROW_NUMBER() over (
ORDER BY
contract_address
) AS row_no
FROM
all_contracts
),
batched AS ({% for item in range(151) %}
SELECT
rn.contract_address,
live.udf_api(
'GET',
CONCAT('https://api.polygonscan.com/api?module=contract&action=getabi&address=',rn.contract_address,'&apikey={key}'),
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'livequery'
),
null,
'Vault/prod/block_explorers/polygon_scan'
) AS abi_data,
SYSDATE() AS _inserted_timestamp
FROM
row_nos rn
WHERE
row_no = {{ item }}
{% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %})
SELECT
contract_address,
abi_data,
_inserted_timestamp
FROM
batched

View File

@ -1,22 +0,0 @@
version: 2
models:
- name: bronze_api__contract_abis
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARCHAR
- dbt_expectations.expect_column_values_to_match_regex:
regex: "^(0x)[0-9a-fA-F]{40}$"

View File

@ -1,130 +0,0 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false,
tags = ['non_realtime']
) }}
WITH base AS (
SELECT
contract_address,
latest_event_block AS latest_block
FROM
{{ ref('silver__relevant_contracts') }}
WHERE
total_event_count >= 25
{% if is_incremental() %}
AND contract_address NOT IN (
SELECT
contract_address
FROM
{{ this }}
)
{% endif %}
ORDER BY
total_event_count DESC
LIMIT
500
), function_sigs AS (
SELECT
'0x313ce567' AS function_sig,
'decimals' AS function_name
UNION
SELECT
'0x06fdde03',
'name'
UNION
SELECT
'0x95d89b41',
'symbol'
),
all_reads AS (
SELECT
*
FROM
base
JOIN function_sigs
ON 1 = 1
),
ready_reads AS (
SELECT
contract_address,
latest_block,
function_sig,
RPAD(
function_sig,
64,
'0'
) AS input,
utils.udf_json_rpc_call(
'eth_call',
[{'to': contract_address, 'from': null, 'data': input}, utils.udf_int_to_hex(latest_block)],
concat_ws(
'-',
contract_address,
input,
latest_block
)
) AS rpc_request
FROM
all_reads
),
batch_reads AS (
SELECT
ARRAY_AGG(rpc_request) AS batch_rpc_request
FROM
ready_reads
),
node_call AS (
SELECT
*,
live.udf_api(
'POST',
CONCAT(
'{Service}',
'/',
'{Authentication}'
),{},
batch_rpc_request,
'Vault/prod/polygon/quicknode/mainnet'
) AS response
FROM
batch_reads
WHERE
EXISTS (
SELECT
1
FROM
ready_reads
LIMIT
1
)
), flat_responses AS (
SELECT
VALUE :id :: STRING AS call_id,
VALUE :result :: STRING AS read_result
FROM
node_call,
LATERAL FLATTEN (
input => response :data
)
)
SELECT
SPLIT_PART(
call_id,
'-',
1
) AS contract_address,
SPLIT_PART(
call_id,
'-',
3
) AS block_number,
LEFT(SPLIT_PART(call_id, '-', 2), 10) AS function_sig,
NULL AS function_input,
read_result,
SYSDATE() :: TIMESTAMP AS _inserted_timestamp
FROM
flat_responses

View File

@ -1,18 +0,0 @@
version: 2
models:
- name: bronze_api__token_reads
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- CONTRACT_ADDRESS
- FUNCTION_SIG
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ

View File

@ -1,25 +0,0 @@
{{ config(
materialized = 'view'
) }}
SELECT
system_created_at,
insert_date,
blockchain,
address,
creator,
label_type,
label_subtype,
address_name,
project_name,
_is_deleted,
modified_timestamp,
labels_combined_id
FROM
{{ source(
'crosschain_silver',
'labels_combined'
) }}
WHERE
blockchain = 'polygon'
AND address LIKE '0x%'

View File

@ -1,80 +0,0 @@
{{ config (
materialized = "view",
tags = ['overflowed_traces']
) }}
{% for item in range(
1,
11
) %}
SELECT
o.file_name,
f.block_number,
f.index_vals,
f.path,
f.key,
f.value_
FROM
(
SELECT
file_name,
file_url,
index_cols,
[overflowed_block, overflowed_tx] AS index_vals
FROM
(
SELECT
block_number,
POSITION,
file_name,
file_url,
index_cols,
VALUE [0] AS overflowed_block,
VALUE [1] AS overflowed_tx,
block_number = overflowed_block
AND POSITION = overflowed_tx AS missing
FROM
(
SELECT
block_number,
POSITION,
file_name,
file_url,
index_cols,
utils.udf_detect_overflowed_responses(
file_url,
index_cols
) AS index_vals
FROM
{{ ref("bronze__potential_overflowed_traces") }}
WHERE
row_no = {{ item }}
),
LATERAL FLATTEN (
input => index_vals
)
)
WHERE
missing = TRUE
) o,
TABLE(
utils.udtf_flatten_overflowed_responses(
o.file_url,
o.index_cols,
[o.index_vals]
)
) f
WHERE
NOT IS_OBJECT(
f.value_
)
AND NOT IS_ARRAY(
f.value_
)
AND NOT IS_NULL_VALUE(
f.value_
) {% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}

View File

@ -1,80 +0,0 @@
{{ config (
materialized = "view",
tags = ['overflowed_traces']
) }}
WITH impacted_blocks AS (
SELECT
blocks_impacted_array
FROM
{{ ref("silver_observability__traces_completeness") }}
ORDER BY
test_timestamp DESC
LIMIT
1
), all_missing AS (
SELECT
DISTINCT VALUE :: INT AS block_number
FROM
impacted_blocks,
LATERAL FLATTEN (
input => blocks_impacted_array
)
),
all_txs AS (
SELECT
block_number,
POSITION AS tx_position,
tx_hash
FROM
{{ ref("silver__transactions") }}
JOIN all_missing USING (block_number)
),
missing_txs AS (
SELECT
DISTINCT txs.block_number,
txs.tx_position,
file_name
FROM
all_txs txs
LEFT JOIN {{ source(
"polygon_gold",
"fact_traces"
) }}
tr2 USING (
block_number,
tx_position
)
JOIN {{ ref("streamline__traces_complete") }} USING (block_number)
LEFT JOIN {{ source(
'polygon_silver',
'overflowed_traces'
) }}
ot USING (
block_number,
tx_position
)
WHERE
tr2.block_number IS NULL
AND ot.block_number IS NULL
)
SELECT
block_number,
tx_position AS POSITION,
file_name,
build_scoped_file_url(
@streamline.bronze.POLYGON_SERVERLESS_PROD,
file_name
) AS file_url,
['block_number', 'array_index'] AS index_cols,
ROW_NUMBER() over (
ORDER BY
block_number ASC,
POSITION ASC
) AS row_no
FROM
missing_txs
ORDER BY
block_number ASC,
POSITION ASC

View File

@ -1,29 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
asset_id,
symbol,
NAME,
decimals,
blockchain,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_native_asset_metadata_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_native_asset_metadata'
) }}
WHERE
blockchain = 'polygon'
AND symbol IN (
'MATIC',
'POL'
)

View File

@ -1,32 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
HOUR,
asset_id,
symbol,
NAME,
decimals,
price,
blockchain,
is_imputed,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_native_prices_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_native_prices'
) }}
WHERE
blockchain = 'polygon'
AND symbol IN (
'MATIC',
'POL'
)

View File

@ -1,26 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
asset_id,
token_address,
NAME,
symbol,
platform,
platform_id,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_provider_asset_metadata_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_provider_asset_metadata'
) }}
WHERE
platform = 'Polygon'
-- platforms specific to Polygon

View File

@ -1,24 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
asset_id,
recorded_hour,
OPEN,
high,
low,
CLOSE,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_provider_prices_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_provider_prices'
) }}
-- prices for all ids

View File

@ -1,28 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
token_address,
asset_id,
symbol,
NAME,
decimals,
blockchain,
blockchain_name,
blockchain_id,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_token_asset_metadata_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_token_asset_metadata'
) }}
WHERE
blockchain = 'polygon'

View File

@ -1,31 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
HOUR,
token_address,
asset_id,
symbol,
NAME,
decimals,
price,
blockchain,
blockchain_name,
blockchain_id,
is_imputed,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_token_prices_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_token_prices'
) }}
WHERE
blockchain = 'polygon'

View File

@ -1,5 +0,0 @@
{% docs borrow_symbol %}
The symbol of the asset/collateral that is paid or received, depending on the action.
{% enddocs %}

View File

@ -1,9 +0,0 @@
{% docs borrow_action %}
The action that the user is taking.
Borrow: user is borrowing an asset
Repay: user is repaying the asset that they have borrowed in a previous loan
Add collateral: user is depositing collateral for their loan. This sometimes happens in the same transaction as the borrowing transaction and sometimes in a separate transaction.
Remove collateral: user is withdrawing collateral. This sometimes happens in the same transaction as the borrowing transaction and sometimes in a separate transaction.
{% enddocs %}

View File

@ -1,9 +0,0 @@
{% docs borrow_amount %}
The meaning depends on the action:
Borrow: The amount of the asset that the user is borrowing
Repay: The amount of the asset that the user is repaying
Add collateral: The amount of collateral that the user is depositing
Remove collateral: The amount of collateral that the user is withdrawing
{% enddocs %}

View File

@ -1,8 +0,0 @@
{% docs borrow_amount_usd %}
The meaning depends on the action:
Borrow: The amount of the asset in USD that the user is borrowing
Repay: The amount of the asset in USD that the user is repaying
Add collateral: The amount of collateral in USD that the user is depositing
Remove collateral: The amount of collateral in USD that the user is withdrawing
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs borrow_asset %}
The address of the asset/collateral token that is being borrowed, repaid, deposited, etc., depending on the action.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs collateral_address %}
The address of the asset that is used for collateral when borrowing funds.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs collateral_symbol %}
The symbol of the asset that is used for collateral when borrowing funds.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_depositor %}
The address of the user who is depositing for lending or withdrawing, depending on the action.
{% enddocs %}

View File

@ -1,4 +0,0 @@
{% docs lending_asset_address %}
The address of the asset in the token pair. This asset is either deposited or withdrawn for lending purposes.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_pool_address %}
The address of the lending pool. For Sushi, this will be the address of the Kashi pair.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_symbol %}
The symbol of the asset that is lent or withdrawn, depending on the action
{% enddocs %}

View File

@ -1,7 +0,0 @@
{% docs lending_action %}
The action that the user is taking.
Deposit: user is depositing funds to be used for lending
Withdraw: user has changed their mind and is no longer willing to lend, so they withdraw their asset
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_amount %}
The amount of the asset that the user is depositing or withdrawing, depending on the action.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_amount_usd %}
The amount of the asset, in USD, that the user is depositing or withdrawing, depending on the action.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_asset %}
The address of the asset (token) that is being deposited/withdrawn, depending on the action
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_borrower_is_a_contract %}
If the depositor of collateral is a contract, this is Yes; if the depositor of collateral is a normal address, it is No.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_lender_is_a_contract %}
If the depositor is a contract, this is Yes; if the depositor is a normal address, it is No.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_lending_pool %}
The name of the lending pool.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_origin_from_address %}
The address of the user who initiates the transaction.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_origin_to_address %}
The person who initiates the depositing transaction has to interact with this address. This address belongs to the lending platform or directs the transaction there.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_block_header_json %}
This JSON column contains the block header details.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_blockchain %}
The blockchain on which transactions are being confirmed.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_blocks_hash %}
The hash of the block header for a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_blocks_nonce %}
Block nonce is a value used during mining to demonstrate proof of work for a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_blocks_table_doc %}
This table contains block level data for the Polygon Blockchain. This table can be used to analyze trends at a block level, for example gas fees vs. total transactions over time. For more information on EVM transactions, please see [Etherscan Resources](https://etherscan.io/directory/Learning_Resources/Ethereum) or [The Ethereum Organization](https://ethereum.org/en/developers/docs/blocks/)
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_difficulty %}
The effort required to mine the block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_extra_data %}
Any data included by the validator for a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_gas_limit %}
Total gas limit provided by all transactions in the block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_gas_used %}
Total gas used in the block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_miner %}
Miner who successfully added a given block to the blockchain.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs poly_network %}
The network on the blockchain used by a transaction.
{% enddocs %}

Some files were not shown because too many files have changed in this diff.