AN-5990/arb-migration (#321)

* models - streamline - core, decoder

* delete bronze subfolders

* models - docs, gh

* silver - observability, core, abis, prices, stats, nft transfers

* gold - stats, price, core, nft transfers

* sources

* tests, dbt project, packages, makefile, python folder, slack notify yml

* macros, and empty folders

* remove base

* update for fsc-evm changes - package, makefile, sources, dbt_project, workflows, docs

* gold tags

* silver bridge tags

* dex tags

* lending tags

* nft tags

* protocol tags

* temp package

* docs

* wh

* 48

---------

Co-authored-by: sam <sam@flipsidecrypto.com>
drethereum 2025-05-08 10:03:03 -06:00 committed by GitHub
parent 44bf1c99c3
commit 288b86157a
663 changed files with 1071 additions and 22856 deletions

View File

@ -0,0 +1,27 @@
name: dbt_alter_all_gha_tasks
run-name: dbt_alter_all_gha_tasks
on:
workflow_dispatch:
branches:
- "main"
inputs:
task_action:
type: choice
description: Action to perform on all tasks
required: true
options:
- RESUME
- SUSPEND
default: RESUME
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_all_gha_tasks.yml@pre-release/v4-beta
with:
task_action: ${{ inputs.task_action }}
target: prod
secrets: inherit
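
Since this workflow only exposes a manual trigger, suspending or resuming every GHA-backed task is done from the Actions tab or the GitHub CLI. A minimal sketch (the repository name is inferred from the project and may need adjusting):
# Suspend all scheduled tasks in one go
gh workflow run dbt_alter_all_gha_tasks.yml -R FlipsideCrypto/arbitrum-models -f task_action=SUSPEND
# ...and resume them later
gh workflow run dbt_alter_all_gha_tasks.yml -R FlipsideCrypto/arbitrum-models -f task_action=RESUME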

View File

@ -1,53 +0,0 @@
name: dbt_alter_gha_task
run-name: dbt_alter_gha_task
on:
workflow_dispatch:
branches:
- "main"
inputs:
workflow_name:
type: string
description: Name of the workflow to perform the action on, no .yml extension
required: true
task_action:
type: choice
description: Action to perform
required: true
options:
- SUSPEND
- RESUME
default: SUSPEND
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
with:
workflow_name: |
${{ inputs.workflow_name }}
task_action: |
${{ inputs.task_action }}
environment: workflow_prod
secrets: inherit
notify-failure:
needs: [called_workflow_template]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,32 @@
name: dbt_alter_gha_tasks
run-name: dbt_alter_gha_tasks
on:
workflow_dispatch:
branches:
- "main"
inputs:
workflow_name:
type: string
description: Name of the workflow to perform the action on, no .yml extension
required: true
task_action:
type: choice
description: Action to perform
required: true
options:
- SUSPEND
- RESUME
default: SUSPEND
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_gha_tasks.yml@pre-release/v4-beta
with:
workflow_name: ${{ inputs.workflow_name }}
task_action: ${{ inputs.task_action }}
target: prod
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_deploy_new_workflows
run-name: dbt_deploy_new_workflows
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Deploy New Github Actions
command: |
make deploy_new_gha_tasks DBT_TARGET=prod
secrets: inherit
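
This workflow is a thin wrapper around the Makefile target introduced later in this commit, so the same deployment can be rehearsed locally against dev before dispatching it in prod (a sketch, assuming a working dev profile):
# Rebuild the workflow seed table and gha_tasks models, then recreate the Snowflake tasks
make deploy_new_gha_tasks DBT_TARGET=dev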

View File

@ -5,75 +5,10 @@ on:
branches:
- "main"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: refresh ddl for datashare
run: |
cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
- name: checkout docs branch
run: |
git checkout -B docs origin/main
- name: generate dbt docs
run: dbt docs generate -t prod
- name: move files to docs directory
run: |
mkdir -p ./docs
cp target/{catalog.json,manifest.json,index.html} docs/
- name: clean up target directory
run: dbt clean
- name: check for changes
run: git status
- name: stage changed files
run: git add .
- name: commit changed files
run: |
git config user.email "abc@xyz"
git config user.name "github-actions"
git commit -am "Auto-update docs"
- name: push changes to docs
run: |
git push -f --set-upstream origin docs
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_docs_update.yml@pre-release/v4-beta
secrets: inherit

View File

@ -3,39 +3,20 @@ run-name: ${{ github.event.inputs.branch }}
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
required: true
type: string
required: true
concurrency: ${{ github.workflow }}
jobs:
prepare_vars:
runs-on: ubuntu-latest
environment:
name: ${{ inputs.environment }}
outputs:
warehouse: ${{ steps.set_outputs.outputs.warehouse }}
steps:
- name: Set warehouse output
id: set_outputs
run: |
echo "warehouse=${{ vars.WAREHOUSE }}" >> $GITHUB_OUTPUT
called_workflow_template:
needs: prepare_vars
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_integration_test.yml@pre-release/v4-beta
with:
command: >
target: ${{ inputs.environment }}
command: |
dbt test --selector 'integration_tests'
environment: ${{ inputs.environment }}
warehouse: ${{ needs.prepare_vars.outputs.warehouse }}
secrets: inherit
notify-failure:
needs: [called_workflow_template]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -1,56 +0,0 @@
name: dbt_run_abi_refresh
run-name: dbt_run_abi_refresh
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Update ABI Models
run: |
dbt run -m "arbitrum_models,tag:abis"
- name: Kick off decoded logs history, if there are new ABIs from users
run: |
dbt run-operation run_decoded_logs_history
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -1,12 +1,12 @@
name: dbt_run_adhoc
run-name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
target:
type: choice
description: DBT Run Environment
required: true
@ -26,49 +26,18 @@ on:
default: DBT
dbt_command:
type: string
description: "DBT Run Command"
description: 'DBT Run Command'
required: true
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
warehouse: ${{ inputs.warehouse }}
target: ${{ inputs.target }}
command_name: Run DBT Command
command: ${{ inputs.dbt_command }}
secrets: inherit
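
The refactored adhoc workflow forwards its three inputs (target, warehouse, dbt_command) to the shared fsc-evm template unchanged, so a one-off command can be dispatched from the CLI like this (sketch; the selector is illustrative and the repo name inferred):
gh workflow run dbt_run_adhoc.yml -R FlipsideCrypto/arbitrum-models \
  -f target=dev \
  -f warehouse=DBT \
  -f dbt_command='dbt run -m "fsc_evm,tag:gold,tag:core"'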

View File

@ -3,80 +3,13 @@ run-name: dbt_run_dev_refresh
on:
workflow_dispatch:
schedule:
# Runs "at 9:00 UTC on Monday" (see https://crontab.guru)
- cron: "0 9 * * 1"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs_refresh:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run-operation run_sp_create_prod_clone
notify-failure:
needs: [run_dbt_jobs_refresh]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
run_dbt_jobs_udfs:
runs-on: ubuntu-latest
needs: run_dbt_jobs_refresh
environment:
name: workflow_dev
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Recreate UDFs
run: |
dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
notify-failure2:
needs: [run_dbt_jobs_udfs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_dev_refresh.yml@pre-release/v4-beta
secrets: inherit

View File

@ -3,50 +3,21 @@ run-name: dbt_run_full_observability
on:
workflow_dispatch:
schedule:
# Runs “At 06:00 on day-of-month 1.” (see https://crontab.guru)
- cron: "0 6 1 * *"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod_2xl
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
warehouse: DBT_EMERGENCY
command_name: Run Observability Models
command: |
dbt run --threads 2 --vars '{"MAIN_OBSERV_FULL_TEST_ENABLED":True}' -m "fsc_evm,tag:observability"
secrets: inherit
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "arbitrum_models,tag:observability"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -3,50 +3,17 @@ run-name: dbt_run_heal_models
on:
workflow_dispatch:
schedule:
# Runs at 04:20 on Wednesday (see https://crontab.guru)
- cron: "20 4 * * 3"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "arbitrum_models,tag:heal" --vars '{"HEAL_MODEL":True}'
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
command_name: Run Heal Models
command: |
dbt run -m "$PROJECT_NAME,tag:heal" --vars '{"HEAL_MODEL":True}'
secrets: inherit

View File

@ -1,59 +0,0 @@
name: dbt_run_operation_reorg
run-name: dbt_run_operation_reorg
on:
workflow_dispatch:
schedule:
# Runs at minute 35 once every week (see https://crontab.guru)
- cron: "35 0 * * 1"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: List reorg models
id: list_models
run: |
reorg_model_list=$(dbt list --select "arbitrum_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT
- name: Execute block_reorg macro
run: |
dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '169'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,22 @@
name: dbt_run_scheduled_abis
run-name: dbt_run_scheduled_abis
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run ABI Models
command: |
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:gold,tag:abis"
command_name_2: Kick off decoded logs history, if there are new user submitted ABIs
command_2: |
dbt run-operation fsc_evm.run_decoded_logs_history
secrets: inherit
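
Comma-joined selectors are intersections in dbt, so "fsc_evm,tag:silver,tag:abis" matches only fsc_evm package models carrying both tags. A quick way to preview what this schedule will build before editing it (sketch):
dbt ls -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:gold,tag:abis" --resource-type model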

View File

@ -6,46 +6,14 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "arbitrum_models,tag:curated"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
command_name: Run Curated Models
command: |
dbt run -m "$PROJECT_NAME,tag:curated" "fsc_evm,tag:curated"
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_run_scheduled_decoder
run-name: dbt_run_scheduled_decoder
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Decoder Models
command: |
dbt run -m "fsc_evm,tag:bronze,tag:decoded_logs" "fsc_evm,tag:silver,tag:decoded_logs" "fsc_evm,tag:gold,tag:decoded_logs"
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_run_scheduled_main
run-name: dbt_run_scheduled_main
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Main Models
command: |
dbt run -m "fsc_evm,tag:bronze,tag:core" "fsc_evm,tag:silver,tag:core" "fsc_evm,tag:gold,tag:core" "fsc_evm,tag:silver,tag:prices" "fsc_evm,tag:gold,tag:prices" "fsc_evm,tag:silver,tag:labels" "fsc_evm,tag:gold,tag:labels" "fsc_evm,tag:gold,tag:nft" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
secrets: inherit

View File

@ -1,51 +0,0 @@
name: dbt_run_scheduled_non_realtime
run-name: dbt_run_scheduled_non_realtime
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "arbitrum_models,tag:non_realtime" "arbitrum_models,tag:streamline_decoded_logs_complete" "arbitrum_models,tag:streamline_decoded_logs_realtime"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,20 @@
name: dbt_run_scheduled_scores
run-name: dbt_run_scheduled_scores
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Scores Models
command: |
dbt run -m "fsc_evm,tag:scores"
secrets: inherit

View File

@ -6,50 +6,15 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "arbitrum_models,tag:streamline_core_complete" "arbitrum_models,tag:streamline_core_realtime" "arbitrum_models,tag:streamline_core_complete_receipts" "arbitrum_models,tag:streamline_core_realtime_receipts" "arbitrum_models,tag:streamline_core_complete_confirm_blocks" "arbitrum_models,tag:streamline_core_realtime_confirm_blocks"
- name: Run Chainhead Tests
run: |
dbt test -m "arbitrum_models,tag:chainhead"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
command_name: Run Chainhead Models
command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:chainhead" "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash"
dbt test -m "fsc_evm,tag:chainhead"
secrets: inherit
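
For local verification against a dev target, the same selection can be run with the testing limit the Makefile (later in this commit) uses in phase_1, keeping the Streamline calls small (a sketch, assuming a dev profile):
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS": True, "MAIN_SL_TESTING_LIMIT": 500}' \
  -m "fsc_evm,tag:chainhead" "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" \
  --exclude "fsc_evm,tag:receipts_by_hash" -t dev
dbt test -m "fsc_evm,tag:chainhead" -t dev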

View File

@ -1,56 +0,0 @@
name: dbt_run_streamline_decoded_logs_history
run-name: dbt_run_streamline_decoded_logs_history
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Update complete table
run: |
dbt run -m "arbitrum_models,tag:streamline_decoded_logs_complete"
- name: Decode historical logs
run: |
dbt run-operation decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -1,51 +0,0 @@
name: dbt_run_streamline_decoder
run-name: dbt_run_streamline_decoder
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "arbitrum_models,tag:decoded_logs"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,22 @@
name: dbt_run_streamline_decoder_history
run-name: dbt_run_streamline_decoder_history
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Decoder Complete
command: |
dbt run -m "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete"
command_name_2: Run Streamline Decoder History
command_2: |
dbt run-operation fsc_evm.decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run History Models
command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:history" --exclude "fsc_evm,tag:receipts_by_hash"
secrets: inherit

View File

@ -1,77 +0,0 @@
name: dbt_run_streamline_history_adhoc
run-name: dbt_run_streamline_history_adhoc
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
type: choice
description: DBT Run Environment
required: true
options:
- dev
- prod
- prod_backfill
default: dev
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: choice
description: "DBT Run Command"
required: true
options:
- dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "arbitrum_models,tag:streamline_core_complete" "arbitrum_models,tag:streamline_core_history" "arbitrum_models,tag:streamline_core_complete_receipts" "arbitrum_models,tag:streamline_core_history_receipts" "arbitrum_models,tag:streamline_core_complete_confirm_blocks" "arbitrum_models,tag:streamline_core_history_confirm_blocks"
- dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "arbitrum_models,tag:streamline_decoded_logs_complete" "arbitrum_models,tag:streamline_decoded_logs_history"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -3,50 +3,21 @@ run-name: dbt_test_daily
on:
workflow_dispatch:
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: "0 9 * * *"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test --exclude "arbitrum_models,tag:full_test" "arbitrum_models,tag:recent_test" livequery_models
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
target: test
command_name: Build Daily Testing Views
command: |
dbt run -m "fsc_evm,tag:daily_test"
command_name_2: Run Daily Tests (all tests excluding full, recent and misc. others)
command_2: |
dbt test --exclude "fsc_evm,tag:full_test" "fsc_evm,tag:recent_test" "fsc_evm,tag:gha_tasks" livequery_models
secrets: inherit

View File

@ -6,47 +6,16 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "arbitrum_models,tag:observability"
dbt test -m "arbitrum_models,tag:recent_test"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
target: test
command_name: Run Observability & Recent Tests
command: |
dbt run -m "fsc_evm,tag:observability"
dbt test -m "fsc_evm,tag:recent_test"
secrets: inherit

View File

@ -3,50 +3,21 @@ run-name: dbt_test_monthly
on:
workflow_dispatch:
schedule:
# Runs “28th of month at 6AM” (see https://crontab.guru)
- cron: "0 6 28 * *"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test -m "arbitrum_models,tag:full_test"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
warehouse: DBT_EMERGENCY
command_name: Build Full Testing Views
command: |
dbt run -m "fsc_evm,tag:full_test"
command_name_2: Run Full Tests
command_2: |
dbt test -m "fsc_evm,tag:full_test"
secrets: inherit

View File

@ -1,27 +0,0 @@
name: Slack Notification
on:
workflow_call:
secrets:
SLACK_WEBHOOK_URL:
required: true
jobs:
notify:
runs-on: ubuntu-latest
environment: workflow_prod
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies
run: pip install requests
- name: Send Slack notification
run: python python/slack_alert.py
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

Makefile (111 changed lines)
View File

@ -1,6 +1,109 @@
SHELL := /bin/bash
DBT_TARGET ?= dev
RECEIPTS_BY_HASH_ENABLED ?= false
dbt-console:
docker-compose run dbt_console
cleanup_time:
@set -e; \
rm -f package-lock.yml && dbt clean && dbt deps
.PHONY: dbt-console
deploy_gha_workflows_table:
@set -e; \
echo "Collecting workflow names..." ; \
WORKFLOW_VALUES="" ; \
for file in $$(find .github/workflows -name "*.yml" -type f); do \
filename=$$(basename "$$file" .yml) ; \
if [ -z "$$WORKFLOW_VALUES" ]; then \
WORKFLOW_VALUES="('$$filename')" ; \
else \
WORKFLOW_VALUES="$$WORKFLOW_VALUES,('$$filename')" ; \
fi ; \
done ; \
echo "Found workflows: $$WORKFLOW_VALUES" ; \
dbt run-operation create_workflow_table --args "{\"workflow_values\": \"$$WORKFLOW_VALUES\"}" -t $(DBT_TARGET)
deploy_gha_tasks:
@set -e; \
make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)
deploy_new_gha_tasks:
@set -e; \
make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)
deploy_livequery:
@set -e; \
dbt run-operation fsc_evm.drop_livequery_schemas --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
deploy_chain_phase_1:
@set -e; \
dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.call_sample_rpc_node -t $(DBT_TARGET); \
if [ "$(DBT_TARGET)" != "prod" ]; then \
if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
fi; \
else \
if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
fi; \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_2"
deploy_chain_phase_2:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_3"
deploy_chain_phase_3:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_2" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_2" -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_4"
deploy_chain_phase_4:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_3" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_3" -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
make deploy_gha_tasks DBT_TARGET=$(DBT_TARGET); \
fi; \
.PHONY: cleanup_time deploy_gha_workflows_table deploy_gha_tasks deploy_new_gha_tasks deploy_livequery deploy_chain_phase_1 deploy_chain_phase_2 deploy_chain_phase_3 deploy_chain_phase_4
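
The phase targets are intended to be run in order with a pause between each, as the echoed hints suggest. A typical dev deployment might look like the following (sketch; wait times are the Makefile's own guidance):
make deploy_chain_phase_1 DBT_TARGET=dev RECEIPTS_BY_HASH_ENABLED=false
# wait ~10 minutes, then:
make deploy_chain_phase_2 DBT_TARGET=dev
# wait ~10 minutes, then:
make deploy_chain_phase_3 DBT_TARGET=dev
# wait ~10 minutes, then:
make deploy_chain_phase_4 DBT_TARGET=dev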

View File

@ -1,8 +0,0 @@
workflow_name,workflow_schedule
dbt_run_scheduled_non_realtime,"12 * * * *"
dbt_run_streamline_chainhead,"5,35 * * * *"
dbt_run_streamline_decoder,"23 * * * *"
dbt_run_scheduled_curated,"30 */6 * * *"
dbt_test_intraday,"30 */4 * * *"
dbt_run_streamline_decoded_logs_history,"5 11 * * 6"
dbt_run_abi_refresh,"10 23 * * *"

View File

@ -17,6 +17,8 @@ test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
docs-paths:
["dbt_packages/fsc_evm/doc_descriptions", "models/doc_descriptions", "models"]
target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
@ -24,13 +26,6 @@ clean-targets: # directories to be removed by `dbt clean`
- "dbt_modules"
- "dbt_packages"
models:
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
tests:
+store_failures: true # all tests
@ -39,7 +34,7 @@ on-run-start:
- "{{ create_udfs() }}"
on-run-end:
- '{{ apply_meta_as_tags(results) }}'
- "{{ apply_meta_as_tags(results) }}"
dispatch:
- macro_namespace: dbt
@ -49,12 +44,71 @@ dispatch:
- dbt
query-comment:
comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}'
comment: "{{ dbt_snowflake_query_tags.get_query_comment(node) }}"
append: true # Snowflake removes prefixed comments.
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models
models:
arbitrum_models: # replace with the name of the chain
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
livequery_models:
+materialized: ephemeral
fsc_evm:
+enabled: false # disable fsc_evm package by default
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
main_package:
+enabled: false # disable top level package by default, enabled subpackages as needed
admin:
+enabled: true
core:
+enabled: true # enable subpackages, as needed
bronze:
+enabled: false
token_reads:
+enabled: true
github_actions:
+enabled: true
labels:
+enabled: true
observability:
+enabled: true
prices:
+enabled: true
utils:
+enabled: true
decoder_package:
+enabled: false
abis:
+enabled: true
decoded_logs:
+enabled: false
gold:
+enabled: true
silver:
+enabled: true
streamline:
+enabled: true
curated_package:
+enabled: false
protocols:
+enabled: false
vertex:
+enabled: true
stats:
+enabled: true
scores_package:
+enabled: false
# In this example config, we tell dbt to build all models in the example/ directory
# as tables. These settings can be overridden in the individual model files
# using the `{{ config(...) }}` macro.
@ -62,21 +116,19 @@ query-comment:
vars:
"dbt_date:time_zone": GMT
STREAMLINE_INVOKE_STREAMS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
UPDATE_UDFS_AND_SPS: False
UPDATE_SNOWFLAKE_TAGS: True
WAIT: 0
OBSERV_FULL_TEST: False
HEAL_MODEL: False
HEAL_MODELS: []
START_GHA_TASKS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
#### STREAMLINE 2.0 BEGIN ####
#### STREAMLINE 2.0 BEGIN ####
API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
ROLES: |
["INTERNAL_DEV"]
config:
# The keys correspond to dbt profiles and are case sensitive
dev:
@ -93,62 +145,4 @@ vars:
- AWS_LAMBDA_ARBITRUM_API
- INTERNAL_DEV
- DBT_CLOUD_ARBITRUM
#### STREAMLINE 2.0 END ####
#### FSC_EVM BEGIN ####
# Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables
### GLOBAL VARIABLES BEGIN ###
## REQUIRED
GLOBAL_PROD_DB_NAME: 'arbitrum'
GLOBAL_NODE_SECRET_PATH: 'Vault/prod/arbitrum/quicknode/mainnet'
GLOBAL_NODE_URL: '{service}/{Authentication}'
GLOBAL_BLOCKS_PER_HOUR: 14200
GLOBAL_USES_STREAMLINE_V1: True
GLOBAL_USES_SINGLE_FLIGHT_METHOD: True
### GLOBAL VARIABLES END ###
### MAIN_PACKAGE VARIABLES BEGIN ###
### CORE ###
## REQUIRED
## OPTIONAL
# GOLD_FULL_REFRESH: True
# SILVER_FULL_REFRESH: True
# BRONZE_FULL_REFRESH: True
# BLOCKS_COMPLETE_FULL_REFRESH: True
# CONFIRM_BLOCKS_COMPLETE_FULL_REFRESH: True
# TRACES_COMPLETE_FULL_REFRESH: True
# RECEIPTS_COMPLETE_FULL_REFRESH: True
# TRANSACTIONS_COMPLETE_FULL_REFRESH: True
# BLOCKS_TRANSACTIONS_REALTIME_TESTING_LIMIT: 3
# BLOCKS_TRANSACTIONS_HISTORY_TESTING_LIMIT: 3
# TRACES_REALTIME_TESTING_LIMIT: 3
# TRACES_HISTORY_TESTING_LIMIT: 3
# ARBTRACE_BLOCK_HISTORY_TESTING_LIMIT: 3
# RECEIPTS_REALTIME_TESTING_LIMIT: 3
# RECEIPTS_HISTORY_TESTING_LIMIT: 3
# CONFIRM_BLOCKS_REALTIME_TESTING_LIMIT: 3
# CONFIRM_BLOCKS_HISTORY_TESTING_LIMIT: 3
### MAIN_PACKAGE VARIABLES END ###
### DECODER_PACKAGE VARIABLES BEGIN ###
## REQUIRED
## OPTIONAL
# DECODED_LOGS_COMPLETE_FULL_REFRESH: True
# DECODED_LOGS_REALTIME_TESTING_LIMIT: 3
# DECODED_LOGS_HISTORY_SQL_LIMIT: 1 #limit per monthly range
### DECODER_PACKAGE VARIABLES END ###
#### FSC_EVM END ####
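
The commented entries above are optional per-run overrides rather than settings to edit in place; they are switched on via --vars when needed. For example, a one-off full refresh of the decoded-logs complete table might look like this (sketch; the selector is illustrative):
dbt run -m "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" \
  --vars '{"DECODED_LOGS_COMPLETE_FULL_REFRESH": True}' -t dev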

View File

@ -1,17 +1,7 @@
{% macro create_udfs() %}
{% if var("UPDATE_UDFS_AND_SPS") %}
{% if var("UPDATE_UDFS_AND_SPS", false) %}
{% set sql %}
CREATE schema if NOT EXISTS silver;
{{ create_udtf_get_base_table(
schema = "streamline"
) }}
{{ create_udf_get_chainhead() }}
{{ create_udf_bulk_json_rpc() }}
{{ create_udf_bulk_get_traces() }}
{{ create_udf_decode_array_string() }}
{{ create_udf_decode_array_object() }}
{{ create_udf_bulk_decode_logs() }}
{% endset %}
{% do run_query(sql) %}
{{- fsc_utils.create_udfs() -}}
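
Because dbt_project.yml calls this macro from on-run-start, UDF creation only happens on runs where UPDATE_UDFS_AND_SPS is enabled, e.g. (sketch; the model selection is illustrative):
dbt run -m "fsc_evm,tag:gold,tag:core" --vars '{"UPDATE_UDFS_AND_SPS": True}' -t dev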

View File

@ -1,22 +0,0 @@
{% macro standard_predicate(
input_column = 'block_number'
) -%}
{%- set database_name = target.database -%}
{%- set schema_name = generate_schema_name(
node = model
) -%}
{%- set table_name = generate_alias_name(
node = model
) -%}
{%- set tmp_table_name = table_name ~ '__dbt_tmp' -%}
{%- set full_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ table_name -%}
{%- set full_tmp_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ tmp_table_name -%}
{{ full_table_name }}.{{ input_column }} >= (
SELECT
MIN(
{{ input_column }}
)
FROM
{{ full_tmp_table_name }}
)
{%- endmacro %}

View File

@ -1,124 +0,0 @@
{% macro decoded_logs_history(backfill_mode=false) %}
{%- set params = {
"sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 8000000),
"producer_batch_size": var("DECODED_LOGS_HISTORY_PRODUCER_BATCH_SIZE", 400000),
"worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000)
} -%}
{% set wait_time = var("DECODED_LOGS_HISTORY_WAIT_TIME", 60) %}
{% set find_months_query %}
SELECT
DISTINCT date_trunc('month', block_timestamp)::date as month
FROM {{ ref('core__fact_blocks') }}
ORDER BY month ASC
{% endset %}
{% set results = run_query(find_months_query) %}
{% if execute %}
{% set months = results.columns[0].values() %}
{% for month in months %}
{% set view_name = 'decoded_logs_history_' ~ month.strftime('%Y_%m') %}
{% set create_view_query %}
create or replace view streamline.{{view_name}} as (
WITH target_blocks AS (
SELECT
block_number
FROM {{ ref('core__fact_blocks') }}
WHERE date_trunc('month', block_timestamp) = '{{month}}'::timestamp
),
new_abis AS (
SELECT
abi,
parent_contract_address,
event_signature,
start_block,
end_block
FROM {{ ref('silver__complete_event_abis') }}
{% if not backfill_mode %}
WHERE inserted_timestamp > dateadd('day', -30, sysdate())
{% endif %}
),
existing_logs_to_exclude AS (
SELECT _log_id
FROM {{ ref('streamline__decoded_logs_complete') }} l
INNER JOIN target_blocks b using (block_number)
),
candidate_logs AS (
SELECT
l.block_number,
l.tx_hash,
l.event_index,
l.contract_address,
l.topics,
l.data,
concat(l.tx_hash::string, '-', l.event_index::string) as _log_id
FROM target_blocks b
INNER JOIN {{ ref('core__fact_event_logs') }} l using (block_number)
WHERE l.tx_status = 'SUCCESS' and date_trunc('month', l.block_timestamp) = '{{month}}'::timestamp
)
SELECT
l.block_number,
l._log_id,
A.abi,
OBJECT_CONSTRUCT(
'topics', l.topics,
'data', l.data,
'address', l.contract_address
) AS data
FROM candidate_logs l
INNER JOIN new_abis A
ON A.parent_contract_address = l.contract_address
AND A.event_signature = l.topics[0]::STRING
AND l.block_number BETWEEN A.start_block AND A.end_block
WHERE NOT EXISTS (
SELECT 1
FROM existing_logs_to_exclude e
WHERE e._log_id = l._log_id
)
LIMIT {{ params.sql_limit }}
)
{% endset %}
{# Create the view #}
{% do run_query(create_view_query) %}
{{ log("Created view for month " ~ month.strftime('%Y-%m'), info=True) }}
{% if var("STREAMLINE_INVOKE_STREAMS", false) %}
{# Check if rows exist first #}
{% set check_rows_query %}
SELECT EXISTS(SELECT 1 FROM streamline.{{view_name}} LIMIT 1)
{% endset %}
{% set results = run_query(check_rows_query) %}
{% set has_rows = results.columns[0].values()[0] %}
{% if has_rows %}
{# Invoke streamline, if rows exist to decode #}
{% set decode_query %}
SELECT
streamline.udf_bulk_decode_logs_v2(
PARSE_JSON(
$${ "external_table": "decoded_logs",
"producer_batch_size": {{ params.producer_batch_size }},
"sql_limit": {{ params.sql_limit }},
"sql_source": "{{view_name}}",
"worker_batch_size": {{ params.worker_batch_size }} }$$
)
);
{% endset %}
{% do run_query(decode_query) %}
{{ log("Triggered decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{# Call wait since we actually did some decoding #}
{% do run_query("call system$wait(" ~ wait_time ~ ")") %}
{{ log("Completed wait after decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{% else %}
{{ log("No rows to decode for month " ~ month.strftime('%Y-%m'), info=True) }}
{% endif %}
{% endif %}
{% endfor %}
{% endif %}
{% endmacro %}
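
This project-local macro (superseded here by fsc_evm.decoded_logs_history, which the new dbt_run_streamline_decoder_history workflow calls) is driven as a run-operation: it builds one view per month and only fires the decode calls when STREAMLINE_INVOKE_STREAMS is set. A sketch of the two modes of the local version:
# Default: only logs matching ABIs inserted in the last 30 days
dbt run-operation decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS": True}'
# Backfill mode: consider all ABIs, not just recent ones
dbt run-operation decoded_logs_history --args '{"backfill_mode": true}' --vars '{"STREAMLINE_INVOKE_STREAMS": True}'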

View File

@ -1,29 +0,0 @@
{% macro run_decoded_logs_history() %}
{% set blockchain = var('GLOBAL_PROD_DB_NAME','').lower() %}
{% set check_for_new_user_abis_query %}
select 1
from {{ ref('silver__user_verified_abis') }}
where _inserted_timestamp::date = sysdate()::date
and dayname(sysdate()) <> 'Sat'
{% endset %}
{% set results = run_query(check_for_new_user_abis_query) %}
{% if execute %}
{% set new_user_abis = results.columns[0].values()[0] %}
{% if new_user_abis %}
{% set invoke_workflow_query %}
SELECT
github_actions.workflow_dispatches(
'FlipsideCrypto',
'{{ blockchain }}' || '-models',
'dbt_run_streamline_decoded_logs_history.yml',
NULL
)
{% endset %}
{% do run_query(invoke_workflow_query) %}
{% endif %}
{% endif %}
{% endmacro %}

View File

@ -1,101 +0,0 @@
{% macro streamline_external_table_query_decoder(
source_name,
source_version
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP())
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_decoder_fr(
source_name,
source_version
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}

View File

@ -1,141 +0,0 @@
{% macro streamline_external_table_query(
source_name,
source_version,
partition_function,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_fr(
source_name,
source_version,
partition_function,
partition_join_key,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.{{ partition_join_key }}
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.{{ partition_join_key }}
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}

View File

@ -1,36 +0,0 @@
{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
{% if model_type != '' %}
{% set model_type = '_' ~ model_type %}
{% endif %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }}
{{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }}
{% if uses_receipts_by_hash %}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{{ log("", info=True) }}
{{ log("=== Source Details ===", info=True) }}
{{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }}
{{ log("", info=True) }}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@ -1,29 +0,0 @@
{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %}
{%- if flags.WHICH == 'compile' and execute -%}
{% if uses_receipts_by_hash %}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' unique_key = "' ~ config.get('unique_key') ~ '",\n' %}
{% set config_log = config_log ~ ' cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %}
{% set config_log = config_log ~ ' merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' post_hook = "' ~ post_hook ~ '",\n' %}
{% set config_log = config_log ~ ' incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' full_refresh = ' ~ full_refresh_type ~ ',\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@ -1,36 +0,0 @@
{% macro log_model_details(vars=false, params=false) %}
{%- if execute -%}
/*
DBT Model Config:
{{ model.config | tojson(indent=2) }}
*/
{% if vars is not false %}
{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( vars | tojson(indent=2), info=True) }}
{% endif %}
/*
Variables:
{{ vars | tojson(indent=2) }}
*/
{% endif %}
{% if params is not false %}
{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( params | tojson(indent=2), info=True) }}
{% endif %}
/*
Parameters:
{{ params | tojson(indent=2) }}
*/
{% endif %}
/*
Raw Code:
{{ model.raw_code }}
*/
{%- endif -%}
{% endmacro %}
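As a usage sketch (the names passed in are assumed), a model can hand its resolved variables and request parameters to this macro; the /* ... */ blocks are always injected into the compiled SQL, while the log() output only appears when the LOG_MODEL_DETAILS variable is true.

{# In a model file (illustrative only) #}
{{ log_model_details(
    vars = {'node_url': node_url, 'new_build': new_build},
    params = streamline_params
) }}

For example, dbt compile --vars '{LOG_MODEL_DETAILS: true}' --select <model> would print the variables and parameters to the console.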

View File

@ -1,55 +0,0 @@
{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("START_UP_BLOCK: " ~ min_block, info=True) }}
{{ log("", info=True) }}
{{ log("=== API Details ===", info=True) }}
{{ log("NODE_URL: " ~ node_url, info=True) }}
{{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
{{ log("", info=True) }}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }}
{{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }}
{{ log("", info=True) }}
{{ log("=== RPC Details ===", info=True) }}
{{ log(model_name ~ ": {", info=True) }}
{{ log(" method: '" ~ method ~ "',", info=True) }}
{{ log(" method_params: " ~ method_params, info=True) }}
{{ log("}", info=True) }}
{{ log("", info=True) }}
{% set params_str = streamline_params | tojson %}
{% set params_formatted = params_str | replace('{', '{\n ') | replace('}', '\n }') | replace(', ', ',\n ') %}
{# Clean up the method_params formatting #}
{% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' post_hook = fsc_utils.if_data_call_function_v2(\n' %}
{% set config_log = config_log ~ ' func = "streamline.udf_bulk_rest_api_v2",\n' %}
{% set config_log = config_log ~ ' target = "' ~ this.schema ~ '.' ~ this.identifier ~ '",\n' %}
{% set config_log = config_log ~ ' params = ' ~ params_formatted ~ '\n' %}
{% set config_log = config_log ~ ' ),\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@ -1,47 +0,0 @@
{% macro set_default_variables_streamline(model_name, model_type) %}
{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%}
{%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%}
{%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%}
{%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%}
{%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime'
else 'ORDER BY partition_key ASC, block_number ASC' -%}
{%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'node_url': node_url,
'node_secret_path': node_secret_path,
'model_quantum_state': model_quantum_state,
'testing_limit': testing_limit,
'new_build': new_build,
'order_by_clause': order_by_clause,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}
{% macro set_default_variables_bronze(source_name, model_type) %}
{%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION',
"CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)")
-%}
{%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%}
{%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%}
{%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'partition_function': partition_function,
'partition_join_key': partition_join_key,
'block_number': block_number,
'balances': balances,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}

View File

@ -1,63 +0,0 @@
{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %}
{%- set rpc_config_details = {
"blocks_transactions": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)',
"exploded_key": ['result', 'result.transactions']
},
"receipts_by_hash": {
"method": 'eth_getTransactionReceipt',
"method_params": 'ARRAY_CONSTRUCT(tx_hash)'
},
"receipts": {
"method": 'eth_getBlockReceipts',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))',
"exploded_key": ['result'],
"lambdas": 2
},
"traces": {
"method": 'debug_traceBlockByNumber',
"method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))",
"exploded_key": ['result'],
"lambdas": 2
},
"confirm_blocks": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)'
},
"arbtrace_block": {
"method": 'arbtrace_block',
"method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))",
"exploded_key": ['result'],
"lambdas": 2
},
} -%}
{%- set rpc_config = rpc_config_details[model_name.lower()] -%}
{%- set params = {
"external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()),
"sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"worker_batch_size": var(
(model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(),
(2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1))
),
"sql_source": (model_name ~ '_' ~ model_type).lower(),
"method": rpc_config['method'],
"method_params": rpc_config['method_params']
} -%}
{%- if rpc_config.get('exploded_key') is not none -%}
{%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%}
{%- endif -%}
{%- if rpc_config.get('lambdas') is not none -%}
{%- do params.update({"lambdas": rpc_config['lambdas']}) -%}
{%- endif -%}
{{ return(params) }}
{% endmacro %}
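Together with set_default_variables_streamline above, a streamline request model would typically resolve these parameters and pass them to the fsc_utils post-hook referenced in log_streamline_details. A rough sketch, with the model name, tags and exact call shape assumed:

{# Hypothetical streamline request model, e.g. streamline__blocks_transactions_realtime #}
{%- set vars = set_default_variables_streamline('blocks_transactions', 'realtime') -%}
{%- set streamline_params = set_streamline_parameters('blocks_transactions', 'realtime') -%}

{{ config (
    materialized = 'view',
    post_hook = fsc_utils.if_data_call_function_v2(
        func = 'streamline.udf_bulk_rest_api_v2',
        target = this.schema ~ '.' ~ this.identifier,
        params = streamline_params
    ),
    tags = ['streamline_core_realtime']
) }}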

View File

@ -1,11 +0,0 @@
{% macro create_aws_arbitrum_api() %}
{% if target.name == "prod" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_arbitrum_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-arbitrum' api_allowed_prefixes = (
'https://sby17ramp4.execute-api.us-east-1.amazonaws.com/prod/',
'https://lz7pjsdoa4.execute-api.us-east-1.amazonaws.com/dev/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}
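Because the integration is only created when target.name is "prod", this macro is typically invoked on its own, e.g. (command shown for illustration):

dbt run-operation create_aws_arbitrum_api --target prod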

View File

@ -1,24 +0,0 @@
{% macro create_udtf_get_base_table(schema) %}
create or replace function {{ schema }}.udtf_get_base_table(max_height integer)
returns table (height number)
as
$$
with base as (
select
row_number() over (
order by
seq4()
) as id
from
table(generator(rowcount => 1000000000))
)
select
id as height
from
base
where
id <= max_height
$$
;
{% endmacro %}
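Once deployed, the UDTF enumerates block heights up to a supplied ceiling; for example (schema and height shown for illustration):

SELECT height
FROM TABLE(streamline.udtf_get_base_table(325000000));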

View File

@ -1,65 +0,0 @@
{% macro create_udf_get_chainhead() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead() returns variant api_integration = aws_arbitrum_api AS {% if target.name == "prod" %}
'https://sby17ramp4.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
{% else %}
'https://lz7pjsdoa4.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
{%- endif %};
{% endmacro %}
{% macro create_udf_bulk_json_rpc() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(
json variant
) returns text api_integration = aws_arbitrum_api AS {% if target.name == "prod" %}
'https://sby17ramp4.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_json_rpc'
{% else %}
'https://lz7pjsdoa4.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_json_rpc'
{%- endif %};
{% endmacro %}
{% macro create_udf_bulk_get_traces() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_get_traces(
json variant
) returns text api_integration = aws_arbitrum_api AS {% if target.name == "prod" %}
'https://sby17ramp4.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_get_traces'
{% else %}
'https://lz7pjsdoa4.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_get_traces'
{%- endif %};
{% endmacro %}
{% macro create_udf_decode_array_string() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
abi ARRAY,
DATA STRING
) returns ARRAY api_integration = aws_arbitrum_api AS {% if target.name == "prod" %}
'https://sby17ramp4.execute-api.us-east-1.amazonaws.com/prod/decode_function'
{% else %}
'https://lz7pjsdoa4.execute-api.us-east-1.amazonaws.com/dev/decode_function'
{%- endif %};
{% endmacro %}
{% macro create_udf_decode_array_object() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
abi ARRAY,
DATA OBJECT
) returns ARRAY api_integration = aws_arbitrum_api AS {% if target.name == "prod" %}
'https://sby17ramp4.execute-api.us-east-1.amazonaws.com/prod/decode_log'
{% else %}
'https://lz7pjsdoa4.execute-api.us-east-1.amazonaws.com/dev/decode_log'
{%- endif %};
{% endmacro %}
{% macro create_udf_bulk_decode_logs() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_decode_logs(
json OBJECT
) returns ARRAY api_integration = aws_arbitrum_api AS {% if target.name == "prod" %}
'https://sby17ramp4.execute-api.us-east-1.amazonaws.com/prod/bulk_decode_logs'
{% else %}
'https://lz7pjsdoa4.execute-api.us-east-1.amazonaws.com/dev/bulk_decode_logs'
{%- endif %};
{% endmacro %}
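With the integration and these functions in place, the chain head can be queried directly from Snowflake:

SELECT streamline.udf_get_chainhead();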

View File

@ -1,25 +0,0 @@
{% test missing_decoded_logs(model) %}
SELECT
l.block_number,
CONCAT(
l.tx_hash,
'-',
l.event_index
) AS _log_id
FROM
{{ ref('core__fact_event_logs') }}
l
LEFT JOIN {{ model }}
d
ON l.block_number = d.block_number
AND CONCAT(
l.tx_hash,
'-',
l.event_index
) = d._log_id
WHERE
l.contract_address = LOWER('0x82aF49447D8a07e3bd95BD0d56f35241523fBab1') -- WETH
AND l.topics [0] :: STRING = '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' -- Transfer
AND l.block_timestamp BETWEEN DATEADD('hour', -48, SYSDATE())
AND DATEADD('hour', -6, SYSDATE())
AND d._log_id IS NULL {% endtest %}

View File

@ -1,135 +0,0 @@
{% macro missing_txs(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_full') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
(
model_tx_hash IS NULL
OR model_block_number IS NULL
)
AND base_block_number NOT IN (
SELECT
block_number
FROM
{{ ref('silver_observability__excluded_receipt_blocks') }}
)
{% endmacro %}
{% macro recent_missing_txs(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}
{% macro missing_confirmed_txs(
model1,
model2
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
block_hash AS base_block_hash,
tx_hash AS base_tx_hash
FROM
{{ model1 }}
),
model_name AS (
SELECT
block_number AS model_block_number,
block_hash AS model_block_hash,
tx_hash AS model_tx_hash
FROM
{{ model2 }}
)
SELECT
DISTINCT base_block_number AS block_number
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
AND base_block_hash = model_block_hash
WHERE
model_tx_hash IS NULL
AND model_block_number <= (
SELECT
MAX(base_block_number)
FROM
txs_base
)
AND base_block_number NOT IN (
SELECT
block_number
FROM
{{ ref('silver_observability__excluded_receipt_blocks') }}
)
{% endmacro %}
{% macro missing_traces(
model1,
model2
) %}
SELECT
DISTINCT block_number
FROM
{{ model1 }}
tx
LEFT JOIN {{ model2 }}
tr USING (
block_number,
tx_hash
)
WHERE
tr.tx_hash IS NULL
AND tx.to_address <> '0x000000000000000000000000000000000000006e'
AND tx.block_number > 22207817
{% endmacro %}
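These macros back singular dbt tests; a minimal sketch of such a test file (file and model names assumed) looks like:

-- tests/silver__missing_txs.sql (hypothetical)
{{ missing_txs(ref('silver__transactions')) }}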

View File

@ -1,47 +0,0 @@
{% test vertex_missing_products(
model,
filter) %}
with recent_records as (
select * from {{model}}
where modified_timestamp >= SYSDATE() - INTERVAL '12 hours'
),
invalid_product_ids as (
select distinct product_id
from {{ ref('silver__vertex_dim_products') }}
where product_id not in (select product_id from recent_records)
AND block_timestamp < sysdate() - INTERVAL '2 days'
{% if filter %}
AND {{ filter }}
{% endif %}
)
select *
from invalid_product_ids
{% endtest %}
{% test vertex_product_level_recency(
model,
filter) %}
with recent_records as (
select distinct(product_id) from {{model}}
where block_timestamp >= SYSDATE() - INTERVAL '7 days'
),
invalid_product_ids as (
select *
from {{ ref('silver__vertex_dim_products') }}
where product_id not in (select product_id from recent_records)
AND block_timestamp < sysdate() - INTERVAL '2 days'
{% if filter %}
AND {{ filter }}
{% endif %}
)
select *
from invalid_product_ids
{% endtest %}

View File

@ -1,78 +0,0 @@
{% macro if_data_call_function(
func,
target
) %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: Calling udf " ~ func ~ " on " ~ target,
True
) }}
{% endif %}
SELECT
{{ func }}
WHERE
EXISTS(
SELECT
1
FROM
{{ target }}
LIMIT
1
)
{% else %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: NOOP",
False
) }}
{% endif %}
SELECT
NULL
{% endif %}
{% endmacro %}
{% macro if_data_call_wait() %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% set query %}
SELECT
1
WHERE
EXISTS(
SELECT
1
FROM
{{ model.schema ~ "." ~ model.alias }}
LIMIT
1
) {% endset %}
{% if execute %}
{% set results = run_query(
query
) %}
{% if results %}
{{ log(
"Waiting...",
info = True
) }}
{% set wait_query %}
SELECT
system$wait(
{{ var(
"WAIT",
600
) }}
) {% endset %}
{% do run_query(wait_query) %}
{% else %}
SELECT
NULL;
{% endif %}
{% endif %}
{% endif %}
{% endmacro %}
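A typical consumer wires if_data_call_function into a model's post_hook so the UDF only fires when the model returned rows. A rough sketch, with the function call and its argument keys assumed:

{{ config (
    materialized = 'view',
    post_hook = if_data_call_function(
        func = "streamline.udf_bulk_get_traces(object_construct('sql_source', '{{ this.identifier }}'))",
        target = "{{ this.schema }}.{{ this.identifier }}"
    )
) }}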

View File

@ -1,87 +0,0 @@
{{ config (
materialized = "ephemeral"
) }}
WITH retry AS (
SELECT
contract_address,
GREATEST(
latest_call_block,
latest_event_block
) AS block_number,
total_interaction_count
FROM
{{ ref("silver__relevant_contracts") }}
r
LEFT JOIN {{ source(
'arbitrum_silver',
'verified_abis'
) }}
v USING (contract_address)
WHERE
r.total_interaction_count >= 250 -- high interaction count
AND GREATEST(
max_inserted_timestamp_logs,
max_inserted_timestamp_traces
) >= CURRENT_DATE - INTERVAL '30 days' -- recent activity
AND v.contract_address IS NULL -- no verified abi
AND r.contract_address NOT IN (
SELECT
contract_address
FROM
{{ source(
'arbitrum_bronze_api',
'contract_abis'
) }}
WHERE
_inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
AND abi_data :data :result :: STRING <> 'Max rate limit reached'
)
ORDER BY
total_interaction_count DESC
LIMIT
25
), FINAL AS (
SELECT
proxy_address AS contract_address,
start_block AS block_number
FROM
{{ ref("silver__proxies") }}
p
JOIN retry r USING (contract_address)
LEFT JOIN {{ source(
'arbitrum_silver',
'verified_abis'
) }}
v
ON v.contract_address = p.proxy_address
WHERE
v.contract_address IS NULL
AND p.contract_address NOT IN (
SELECT
contract_address
FROM
{{ source(
'arbitrum_bronze_api',
'contract_abis'
) }}
WHERE
_inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
AND abi_data :data :result :: STRING <> 'Max rate limit reached'
)
UNION ALL
SELECT
contract_address,
block_number
FROM
retry
)
SELECT
*
FROM
FINAL qualify ROW_NUMBER() over (
PARTITION BY contract_address
ORDER BY
block_number DESC
) = 1

View File

@ -1,79 +0,0 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false,
tags = ['curated']
) }}
WITH base AS (
SELECT
contract_address
FROM
{{ ref('silver__relevant_contracts') }}
WHERE
total_interaction_count >= 100
{% if is_incremental() %}
and contract_address not in (
SELECT
contract_address
FROM
{{ this }}
WHERE
abi_data :data :result :: STRING <> 'Max rate limit reached'
)
{% endif %}
order by total_interaction_count desc
LIMIT
100
), all_contracts AS (
SELECT
contract_address
FROM
base
UNION
SELECT
contract_address
FROM
{{ ref('_retry_abis') }}
),
row_nos AS (
SELECT
contract_address,
ROW_NUMBER() over (
ORDER BY
contract_address
) AS row_no
FROM
all_contracts
),
batched AS ({% for item in range(151) %}
SELECT
rn.contract_address,
live.udf_api(
'GET',
CONCAT('https://api.arbiscan.io/api?module=contract&action=getabi&address=', rn.contract_address, '&apikey={key}'),
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'livequery'
),
NULL,
'Vault/prod/block_explorers/arbitrum_scan'
)AS abi_data,
SYSDATE() AS _inserted_timestamp
FROM
row_nos rn
WHERE
row_no = {{ item }}
{% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %})
SELECT
contract_address,
abi_data,
_inserted_timestamp
FROM
batched
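
Downstream models would typically unpack the raw explorer response before use; an illustrative query:

SELECT
    contract_address,
    abi_data :data :result :: STRING AS abi,
    _inserted_timestamp
FROM
    {{ ref('bronze_api__contract_abis') }}
WHERE
    abi_data :data :result :: STRING <> 'Max rate limit reached'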

View File

@ -1,22 +0,0 @@
version: 2
models:
- name: bronze_api__contract_abis
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARCHAR
- dbt_expectations.expect_column_values_to_match_regex:
regex: "^(0x)[0-9a-fA-F]{40}$"

View File

@ -1,130 +0,0 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false,
tags = ['non_realtime']
) }}
WITH base AS (
SELECT
contract_address,
latest_event_block AS latest_block
FROM
{{ ref('silver__relevant_contracts') }}
WHERE
total_event_count >= 25
{% if is_incremental() %}
AND contract_address NOT IN (
SELECT
contract_address
FROM
{{ this }}
)
{% endif %}
ORDER BY
total_event_count DESC
LIMIT
200
), function_sigs AS (
SELECT
'0x313ce567' AS function_sig,
'decimals' AS function_name
UNION
SELECT
'0x06fdde03',
'name'
UNION
SELECT
'0x95d89b41',
'symbol'
),
all_reads AS (
SELECT
*
FROM
base
JOIN function_sigs
ON 1 = 1
),
ready_reads AS (
SELECT
contract_address,
latest_block,
function_sig,
RPAD(
function_sig,
64,
'0'
) AS input,
utils.udf_json_rpc_call(
'eth_call',
[{'to': contract_address, 'from': null, 'data': input}, utils.udf_int_to_hex(latest_block)],
concat_ws(
'-',
contract_address,
input,
latest_block
)
) AS rpc_request
FROM
all_reads
),
batch_reads AS (
SELECT
ARRAY_AGG(rpc_request) AS batch_rpc_request
FROM
ready_reads
),
node_call AS (
SELECT
*,
live.udf_api(
'POST',
CONCAT(
'{service}',
'/',
'{Authentication}'
),{},
batch_rpc_request,
'Vault/prod/arbitrum/quicknode/mainnet'
) AS response
FROM
batch_reads
WHERE
EXISTS (
SELECT
1
FROM
ready_reads
LIMIT
1
)
), flat_responses AS (
SELECT
VALUE :id :: STRING AS call_id,
VALUE :result :: STRING AS read_result
FROM
node_call,
LATERAL FLATTEN (
input => response :data
)
)
SELECT
SPLIT_PART(
call_id,
'-',
1
) AS contract_address,
SPLIT_PART(
call_id,
'-',
3
) AS block_number,
LEFT(SPLIT_PART(call_id, '-', 2), 10) AS function_sig,
NULL AS function_input,
read_result,
SYSDATE() :: TIMESTAMP AS _inserted_timestamp
FROM
flat_responses

View File

@ -1,18 +0,0 @@
version: 2
models:
- name: bronze_api__token_reads
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- CONTRACT_ADDRESS
- FUNCTION_SIG
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ

View File

@ -1,25 +0,0 @@
{{ config(
materialized = 'view'
) }}
SELECT
system_created_at,
insert_date,
blockchain,
address,
creator,
label_type,
label_subtype,
address_name,
project_name,
_is_deleted,
modified_timestamp,
labels_combined_id
FROM
{{ source(
'crosschain_silver',
'labels_combined'
) }}
WHERE
blockchain = 'arbitrum'
AND address LIKE '0x%'

View File

@ -1,26 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
asset_id,
symbol,
NAME,
decimals,
blockchain,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_native_asset_metadata_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_native_asset_metadata'
) }}
WHERE
blockchain = 'ethereum'
AND symbol = 'ETH'

View File

@ -1,29 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
HOUR,
asset_id,
symbol,
NAME,
decimals,
price,
blockchain,
is_imputed,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_native_prices_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_native_prices'
) }}
WHERE
blockchain = 'ethereum'
AND symbol = 'ETH'

View File

@ -1,26 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
asset_id,
token_address,
NAME,
symbol,
platform,
platform_id,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_provider_asset_metadata_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_provider_asset_metadata'
) }}
WHERE
platform = 'Arbitrum'
-- platforms specific to Arbitrum

View File

@ -1,24 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
asset_id,
recorded_hour,
OPEN,
high,
low,
CLOSE,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_provider_prices_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_provider_prices'
) }}
-- prices for all ids

View File

@ -1,28 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
token_address,
asset_id,
symbol,
NAME,
decimals,
blockchain,
blockchain_name,
blockchain_id,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_token_asset_metadata_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_token_asset_metadata'
) }}
WHERE
blockchain = 'arbitrum'

View File

@ -1,31 +0,0 @@
{{ config (
materialized = 'view'
) }}
SELECT
HOUR,
token_address,
asset_id,
symbol,
NAME,
decimals,
price,
blockchain,
blockchain_name,
blockchain_id,
is_imputed,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_token_prices_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_token_prices'
) }}
WHERE
blockchain = 'arbitrum'

View File

@ -1,5 +0,0 @@
{% docs borrow_symbol %}
The symbol of the asset/collateral that is paid or received, depending on the action.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs borrower %}
The address of the user who is borrowing or repaying the loan, depending on the action.
{% enddocs %}

View File

@ -1,9 +0,0 @@
{% docs borrow_action %}
The action that the user is taking.
Borrow: the user is borrowing an asset.
Repay: the user is repaying an asset that they borrowed in a previous loan.
Add collateral: the user is depositing collateral for their loan. This sometimes happens in the same transaction as the borrow and sometimes in a separate transaction.
Remove collateral: the user is withdrawing collateral. This sometimes happens in the same transaction as the borrow and sometimes in a separate transaction.
{% enddocs %}

View File

@ -1,9 +0,0 @@
{% docs borrow_amount %}
The meaning depends on the action:
Borrow: The amount of the asset that the user is borrowing
Repay: The amount of the asset that the user is repaying
Add collateral: The amount of collateral that the user is depositing
Remove collateral: The amount of collateral that the user is withdrawing
{% enddocs %}

View File

@ -1,8 +0,0 @@
{% docs borrow_amount_usd %}
The meaning depends on the action:
Borrow: The amount of the asset in USD that the user is borrowing
Repay: The amount of the asset in USD that the user is repaying
Add collateral: The amount of collateral in USD that the user is depositing
Remove collateral: The amount of collateral in USD that the user is withdrawing
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs borrow_asset %}
The address of the asset/collateral token that is being borrowed, repaid, or deposited, depending on the action.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs collateral_address %}
The address of the asset that is used for collateral when borrowing funds.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs collateral_symbol %}
The symbol of the asset that is used for collateral when borrowing funds.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_depositor %}
The address of the user who is depositing funds for lending or withdrawing them, depending on the action.
{% enddocs %}

View File

@ -1,4 +0,0 @@
{% docs lending_asset_address %}
The address of the asset in the token pair. This asset is either deposited or withdrawn for lending purposes.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_pool_address %}
The address of the lending pool. For Sushi, this is the address of the Kashi pair.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_symbol %}
The symbol of the asset that is lent or withdrawn, depending on the action
{% enddocs %}

View File

@ -1,7 +0,0 @@
{% docs lending_action %}
The action that the user is taking.
Deposit: the user is depositing funds to be used for lending
Withdraw: the user is no longer willing to lend, so they withdraw their asset
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_amount %}
The amount of the asset that the user is depositing or withdrawing, depending on the action.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_amount_usd %}
The amount of the asset, in USD, that the user is depositing or withdrawing, depending on the action.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_asset %}
The address of the asset (token) that is being deposited/withdrawn, depending on the action
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_borrower_is_a_contract %}
If the depositor of collateral is a contract, the value is 'Yes'; if the depositor of collateral is a regular (externally owned) address, the value is 'No'.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_lender_is_a_contract %}
If the depositor is a contract, the value is 'Yes'; if the depositor is a regular (externally owned) address, the value is 'No'.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_lending_pool %}
The name of the lending pool.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_origin_from_address %}
The address of the user who initiates the transaction.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs lending_origin_to_address %}
The address that the initiator of the deposit transaction interacts with. This address belongs to the lending platform or routes the transaction there.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_block_header_json %}
This JSON column contains the block header details.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_blockchain %}
The blockchain on which transactions are being confirmed.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_blocks_hash %}
The hash of the block header for a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_blocks_nonce %}
Block nonce is a value used during mining to demonstrate proof of work for a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_blocks_table_doc %}
This table contains block level data for the Arbitrum Blockchain. This table can be used to analyze trends at a block level, for example gas fees vs. total transactions over time. For more information on EVM transactions, please see [Etherscan Resources](https://etherscan.io/directory/Learning_Resources/Ethereum) or [The Ethereum Organization](https://ethereum.org/en/developers/docs/blocks/)
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_difficulty %}
The effort required to mine the block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_extra_data %}
Any data included by the validator for a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_gas_limit %}
Total gas limit provided by all transactions in the block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_gas_used %}
Total gas used in the block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_miner %}
Miner who successfully added a given block to the blockchain.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_network %}
The network on the blockchain used by a transaction.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_parent_hash %}
The hash of the block from which a given block is generated. Also known as the parent block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_receipts_root %}
The root of the receipts trie for the block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_sha3_uncles %}
The Keccak-256 (SHA-3) hash of the uncles list for the block. For chains without uncle blocks, this is the hash of the RLP encoding of an empty list.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_size %}
Block size, which is determined by a given block's gas limit.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_total_difficulty %}
Total difficulty of the chain at a given block.
{% enddocs %}

View File

@ -1,5 +0,0 @@
{% docs arb_tx_count %}
Total number of transactions within a block.
{% enddocs %}

Some files were not shown because too many files have changed in this diff.