Mirror of https://github.com/FlipsideCrypto/optimism-models.git (synced 2026-02-06 09:16:42 +00:00)
AN-5991/op-migration (#340)
* streamline bronze
* silver models
* gold tables
* docs, gha
* bronze folders
* macros
* make file, packages, project yml, delete tests
* remove silver nft transfers ref
* sources
* macro tests
* update for fsc-evm changes - package, makefile, sources, dbt_project, workflows, docs
* gold tags
* add ethereum models back in
* silver bridge tags
* dex tags
* lending tags
* nft tags
* added back protocols folder and tagged
* temp package
* docs
* docs
* wh
* observability exclusions
* 48
* v51
* 52
* 53

---------

Co-authored-by: sam <sam@flipsidecrypto.com>
parent 25e9c57b2d
commit 6623a042f3
.github/workflows/dbt_alter_all_gha_tasks.yml (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
name: dbt_alter_all_gha_tasks
run-name: dbt_alter_all_gha_tasks

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      task_action:
        type: choice
        description: Action to perform on all tasks
        required: true
        options:
          - RESUME
          - SUSPEND
        default: RESUME

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_all_gha_tasks.yml@pre-release/v4-beta
    with:
      task_action: ${{ inputs.task_action }}
      target: prod
    secrets: inherit
.github/workflows/dbt_alter_gha_task.yml (vendored, deleted, 53 lines)
@@ -1,53 +0,0 @@
name: dbt_alter_gha_task
run-name: dbt_alter_gha_task

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      workflow_name:
        type: string
        description: Name of the workflow to perform the action on, no .yml extension
        required: true
      task_action:
        type: choice
        description: Action to perform
        required: true
        options:
          - SUSPEND
          - RESUME
        default: SUSPEND

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
    with:
      workflow_name: |
        ${{ inputs.workflow_name }}
      task_action: |
        ${{ inputs.task_action }}
      environment: workflow_prod
    secrets: inherit

  notify-failure:
    needs: [called_workflow_template]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_alter_gha_tasks.yml (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
name: dbt_alter_gha_tasks
run-name: dbt_alter_gha_tasks

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      workflow_name:
        type: string
        description: Name of the workflow to perform the action on, no .yml extension
        required: true
      task_action:
        type: choice
        description: Action to perform
        required: true
        options:
          - SUSPEND
          - RESUME
        default: SUSPEND

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_gha_tasks.yml@pre-release/v4-beta
    with:
      workflow_name: ${{ inputs.workflow_name }}
      task_action: ${{ inputs.task_action }}
      target: prod
    secrets: inherit
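A hypothetical manual dispatch of this new workflow with the GitHub CLI; the input values below are illustrative, not from this commit, and gh must be authenticated against the repo:

    # Suspend the Snowflake task behind one scheduled workflow (example values):
    gh workflow run dbt_alter_gha_tasks.yml \
      --repo FlipsideCrypto/optimism-models \
      -f workflow_name=dbt_run_scheduled_main \
      -f task_action=SUSPEND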
.github/workflows/dbt_deploy_new_workflows.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: dbt_deploy_new_workflows
run-name: dbt_deploy_new_workflows

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Deploy New Github Actions
      command: |
        make deploy_new_gha_tasks DBT_TARGET=prod
    secrets: inherit
.github/workflows/dbt_docs_update.yml (vendored, 71 lines changed)
@@ -5,75 +5,10 @@ on:
    branches:
      - "main"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: refresh ddl for datashare
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
      - name: checkout docs branch
        run: |
          git checkout -B docs origin/main
      - name: generate dbt docs
        run: dbt docs generate -t prod

      - name: move files to docs directory
        run: |
          mkdir -p ./docs
          cp target/{catalog.json,manifest.json,index.html} docs/
      - name: clean up target directory
        run: dbt clean

      - name: check for changes
        run: git status

      - name: stage changed files
        run: git add .

      - name: commit changed files
        run: |
          git config user.email "abc@xyz"
          git config user.name "github-actions"
          git commit -am "Auto-update docs"
      - name: push changes to docs
        run: |
          git push -f --set-upstream origin docs

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_docs_update.yml@pre-release/v4-beta
    secrets: inherit
.github/workflows/dbt_integration_test.yml (vendored, 31 lines changed)
@@ -3,39 +3,20 @@ run-name: ${{ github.event.inputs.branch }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        required: true
        type: string
        required: true

concurrency: ${{ github.workflow }}

jobs:
  prepare_vars:
    runs-on: ubuntu-latest
    environment:
      name: ${{ inputs.environment }}
    outputs:
      warehouse: ${{ steps.set_outputs.outputs.warehouse }}
    steps:
      - name: Set warehouse output
        id: set_outputs
        run: |
          echo "warehouse=${{ vars.WAREHOUSE }}" >> $GITHUB_OUTPUT

  called_workflow_template:
    needs: prepare_vars
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_integration_test.yml@pre-release/v4-beta
    with:
      command: >
      target: ${{ inputs.environment }}
      command: |
        dbt test --selector 'integration_tests'
      environment: ${{ inputs.environment }}
      warehouse: ${{ needs.prepare_vars.outputs.warehouse }}
    secrets: inherit

  notify-failure:
    needs: [called_workflow_template]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_abi_refresh.yml (vendored, deleted, 56 lines)
@@ -1,56 +0,0 @@
name: dbt_run_abi_refresh
run-name: dbt_run_abi_refresh

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Update ABI models
        run: |
          dbt run -m "optimism_models,tag:abis"

      - name: Kick off decoded logs history, if there are new ABIs from users
        run: |
          dbt run-operation run_decoded_logs_history

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_adhoc.yml (vendored, 61 lines changed)
@@ -1,13 +1,13 @@
name: dbt_run_adhoc
run-name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        type: choice
      target:
        type: choice
        description: DBT Run Environment
        required: true
        options:
@@ -15,9 +15,9 @@ on:
          - prod
        default: dev
      warehouse:
        type: choice
        type: choice
        description: Snowflake warehouse
        required: true
        required: true
        options:
          - DBT
          - DBT_CLOUD
@@ -26,49 +26,18 @@ on:
        default: DBT
      dbt_command:
        type: string
        description: "DBT Run Command"
        description: 'DBT Run Command'
        required: true

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"


concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      warehouse: ${{ inputs.warehouse }}
      target: ${{ inputs.target }}
      command_name: Run DBT Command
      command: ${{ inputs.dbt_command }}
    secrets: inherit
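Likewise, the reworked dbt_run_adhoc workflow can be dispatched with an arbitrary dbt command; a sketch with illustrative input values:

    # Kick off an ad hoc dbt run against dev on the default warehouse:
    gh workflow run dbt_run_adhoc.yml \
      --repo FlipsideCrypto/optimism-models \
      -f target=dev \
      -f warehouse=DBT \
      -f dbt_command='dbt run -m "fsc_evm,tag:curated"'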
.github/workflows/dbt_run_dev_refresh.yml (vendored, 77 lines changed)
@@ -3,80 +3,13 @@ run-name: dbt_run_dev_refresh

on:
  workflow_dispatch:
    schedule:
      # Runs "at 13:36 UTC Monday" (see https://crontab.guru)
      - cron: "36 13 * * 1"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs_refresh:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run-operation run_sp_create_prod_clone

  notify-failure:
    needs: [run_dbt_jobs_refresh]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

  run_dbt_jobs_udfs:
    runs-on: ubuntu-latest
    needs: run_dbt_jobs_refresh
    environment:
      name: workflow_dev

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run Recreate UDFs
        run: |
          dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
          dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev

  notify-failure2:
    needs: [run_dbt_jobs_udfs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_dev_refresh.yml@pre-release/v4-beta
    secrets: inherit
.github/workflows/dbt_run_full_observability.yml (vendored, 53 lines changed)
@@ -3,50 +3,21 @@ run-name: dbt_run_full_observability

on:
  workflow_dispatch:
    schedule:
      # Runs “At 06:00 on day-of-month 1.” (see https://crontab.guru)
      - cron: "0 6 1 * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod_2xl
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      warehouse: DBT_EMERGENCY
      command_name: Run Observability Models
      command: |
        dbt run --threads 2 --vars '{"MAIN_OBSERV_FULL_TEST_ENABLED":True}' -m "fsc_evm,tag:observability"
    secrets: inherit


    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "optimism_models,tag:observability"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_heal_models.yml (vendored, 53 lines changed)
@@ -3,50 +3,17 @@ run-name: dbt_run_heal_models

on:
  workflow_dispatch:
    schedule:
      # Runs at 04:00 on Wednesday (see https://crontab.guru)
      - cron: "0 4 * * 3"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "optimism_models,tag:heal" --vars '{"HEAL_MODEL":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Heal Models
      command: |
        dbt run -m "$PROJECT_NAME,tag:heal" --vars '{"HEAL_MODEL":True}'
    secrets: inherit
.github/workflows/dbt_run_operation_reorg.yml (vendored, deleted, 59 lines)
@@ -1,59 +0,0 @@
name: dbt_run_operation_reorg
run-name: dbt_run_operation_reorg

on:
  workflow_dispatch:
    schedule:
      # Runs at minute 50 every Monday (see https://crontab.guru)
      - cron: "50 0 * * 1"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: List reorg models
        id: list_models
        run: |
          reorg_model_list=$(dbt list --select "optimism_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
          echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT

      - name: Execute block_reorg macro
        run: |
          dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '169'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_overflowed_traces.yml (vendored, deleted, 51 lines)
@@ -1,51 +0,0 @@
name: dbt_run_overflowed_traces
run-name: dbt_run_overflowed_traces

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "optimism_models,tag:overflowed_traces" --vars '{"OVERFLOWED_TRACES":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_scheduled_abis.yml (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
name: dbt_run_scheduled_abis
run-name: dbt_run_scheduled_abis

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run ABI Models
      command: |
        dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:gold,tag:abis"
      command_name_2: Kick off decoded logs history, if there are new user submitted ABIs
      command_2: |
        dbt run-operation fsc_evm.run_decoded_logs_history
    secrets: inherit
.github/workflows/dbt_run_scheduled_curated.yml (vendored, 46 lines changed)
@@ -6,46 +6,14 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "optimism_models,tag:curated"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Curated Models
      command: |
        dbt run -m "$PROJECT_NAME,tag:curated" "fsc_evm,tag:curated"
    secrets: inherit
.github/workflows/dbt_run_scheduled_decoder.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: dbt_run_scheduled_decoder
run-name: dbt_run_scheduled_decoder

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Decoder Models
      command: |
        dbt run -m "fsc_evm,tag:bronze,tag:decoded_logs" "fsc_evm,tag:silver,tag:decoded_logs" "fsc_evm,tag:gold,tag:decoded_logs"
    secrets: inherit
.github/workflows/dbt_run_scheduled_main.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: dbt_run_scheduled_main
run-name: dbt_run_scheduled_main

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Main Models
      command: |
        dbt run -m "fsc_evm,tag:bronze,tag:core" "fsc_evm,tag:silver,tag:core" "fsc_evm,tag:gold,tag:core" "fsc_evm,tag:silver,tag:prices" "fsc_evm,tag:gold,tag:prices" "fsc_evm,tag:silver,tag:labels" "fsc_evm,tag:gold,tag:labels" "fsc_evm,tag:gold,tag:nft" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
    secrets: inherit
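For reference on the selectors above: in dbt's node selection syntax, comma-separated criteria intersect, so each quoted selector matches only models that carry every listed tag within the fsc_evm package. A read-only way to preview what one selector resolves to (sketch; the target name is illustrative):

    # List the models behind one intersection selector without running them:
    dbt ls -s "fsc_evm,tag:gold,tag:core" -t dev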
@@ -1,51 +0,0 @@
name: dbt_run_scheduled_non_realtime
run-name: dbt_run_scheduled_non_realtime

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "optimism_models,tag:non_realtime" "optimism_models,tag:streamline_decoded_logs_complete" "optimism_models,tag:streamline_decoded_logs_realtime"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_scheduled_scores.yml (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@

name: dbt_run_scheduled_scores
run-name: dbt_run_scheduled_scores

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Scores Models
      command: |
        dbt run -m "fsc_evm,tag:scores"
    secrets: inherit
@@ -6,50 +6,15 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "optimism_models,tag:streamline_core_complete" "optimism_models,tag:streamline_core_realtime" "optimism_models,tag:streamline_core_complete_receipts" "optimism_models,tag:streamline_core_realtime_receipts" "optimism_models,tag:streamline_core_complete_confirm_blocks" "optimism_models,tag:streamline_core_realtime_confirm_blocks"

      - name: Run Chainhead Tests
        run: |
          dbt test -m "optimism_models,tag:chainhead"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Chainhead Models
      command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:chainhead" "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash"
        dbt test -m "fsc_evm,tag:chainhead"
    secrets: inherit
@@ -1,56 +0,0 @@
name: dbt_run_streamline_decoded_logs_history
run-name: dbt_run_streamline_decoded_logs_history

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Update complete table
        run: |
          dbt run -m "optimism_models,tag:streamline_decoded_logs_complete"

      - name: Decode historical logs
        run: |
          dbt run-operation decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_streamline_decoder.yml (vendored, deleted, 51 lines)
@@ -1,51 +0,0 @@
name: dbt_run_streamline_decoder
run-name: dbt_run_streamline_decoder

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "optimism_models,tag:decoded_logs"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_streamline_decoder_history.yml (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
name: dbt_run_streamline_decoder_history
run-name: dbt_run_streamline_decoder_history

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Decoder Complete
      command: |
        dbt run -m "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete"
      command_name_2: Run Streamline Decoder History
      command_2: |
        dbt run-operation fsc_evm.decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
    secrets: inherit
.github/workflows/dbt_run_streamline_history.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run History Models
      command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:history" --exclude "fsc_evm,tag:receipts_by_hash"
    secrets: inherit
@@ -1,77 +0,0 @@
name: dbt_run_streamline_history_adhoc
run-name: dbt_run_streamline_history_adhoc

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        type: choice
        description: DBT Run Environment
        required: true
        options:
          - dev
          - prod
          - prod_backfill
        default: dev
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
        default: DBT
      dbt_command:
        type: choice
        description: "DBT Run Command"
        required: true
        options:
          - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "optimism_models,tag:streamline_core_complete" "optimism_models,tag:streamline_core_history" "optimism_models,tag:streamline_core_complete_receipts" "optimism_models,tag:streamline_core_history_receipts" "optimism_models,tag:streamline_core_complete_confirm_blocks" "optimism_models,tag:streamline_core_history_confirm_blocks"
          - dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "optimism_models,tag:streamline_decoded_logs_complete" "optimism_models,tag:streamline_decoded_logs_history"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_test_daily.yml (vendored, 59 lines changed)
@@ -1,52 +1,23 @@
name: dbt_test_daily
name: dbt_test_daily
run-name: dbt_test_daily

on:
  workflow_dispatch:
    schedule:
      # Runs "at 9:00 UTC" (see https://crontab.guru)
      - cron: "0 9 * * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt test --exclude "optimism_models,tag:full_test" "optimism_models,tag:recent_test" "optimism_models,tag:gha_tasks" livequery_models livequery_base

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      target: test
      command_name: Build Daily Testing Views
      command: |
        dbt run -m "fsc_evm,tag:daily_test"
      command_name_2: Run Daily Tests (all tests excluding full, recent and misc. others)
      command_2: |
        dbt test --exclude "fsc_evm,tag:full_test" "fsc_evm,tag:recent_test" "fsc_evm,tag:gha_tasks" livequery_models
    secrets: inherit
.github/workflows/dbt_test_intraday.yml (vendored, 51 lines changed)
@@ -5,48 +5,17 @@ on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"


concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "optimism_models,tag:observability"
          dbt test -m "optimism_models,tag:recent_test"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      target: test
      command_name: Run Observability & Recent Tests
      command: |
        dbt run -m "fsc_evm,tag:observability"
        dbt test -m "fsc_evm,tag:recent_test"
    secrets: inherit
.github/workflows/dbt_test_monthly.yml (vendored, 57 lines changed)
@@ -3,50 +3,21 @@ run-name: dbt_test_monthly

on:
  workflow_dispatch:
    schedule:
      # Runs “At 8pm on 28th of month.” (see https://crontab.guru)
      - cron: "0 20 28 * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt test -m "optimism_models,tag:full_test"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      warehouse: DBT_EMERGENCY
      command_name: Build Full Testing Views
      command: |
        dbt run -m "fsc_evm,tag:full_test"
      command_name_2: Run Full Tests
      command_2: |
        dbt test -m "fsc_evm,tag:full_test"
    secrets: inherit
.github/workflows/slack_notify.yml (vendored, deleted, 27 lines)
@@ -1,27 +0,0 @@
name: Slack Notification
on:
  workflow_call:
    secrets:
      SLACK_WEBHOOK_URL:
        required: true

jobs:
  notify:
    runs-on: ubuntu-latest
    environment: workflow_prod
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Install dependencies
        run: pip install requests

      - name: Send Slack notification
        run: python python/slack_alert.py
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
Makefile (111 lines changed)
@@ -1,6 +1,109 @@
SHELL := /bin/bash
DBT_TARGET ?= dev
RECEIPTS_BY_HASH_ENABLED ?= false

dbt-console:
    docker-compose run dbt_console
cleanup_time:
    @set -e; \
    rm -f package-lock.yml && dbt clean && dbt deps

.PHONY: dbt-console
deploy_gha_workflows_table:
    @set -e; \
    echo "Collecting workflow names..." ; \
    WORKFLOW_VALUES="" ; \
    for file in $$(find .github/workflows -name "*.yml" -type f); do \
        filename=$$(basename "$$file" .yml) ; \
        if [ -z "$$WORKFLOW_VALUES" ]; then \
            WORKFLOW_VALUES="('$$filename')" ; \
        else \
            WORKFLOW_VALUES="$$WORKFLOW_VALUES,('$$filename')" ; \
        fi ; \
    done ; \
    echo "Found workflows: $$WORKFLOW_VALUES" ; \
    dbt run-operation create_workflow_table --args "{\"workflow_values\": \"$$WORKFLOW_VALUES\"}" -t $(DBT_TARGET)

deploy_gha_tasks:
    @set -e; \
    make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
    dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET); \
    dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)

deploy_new_gha_tasks:
    @set -e; \
    make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
    dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)

deploy_livequery:
    @set -e; \
    dbt run-operation fsc_evm.drop_livequery_schemas --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
    dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)

deploy_chain_phase_1:
    @set -e; \
    dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.call_sample_rpc_node -t $(DBT_TARGET); \
    if [ "$(DBT_TARGET)" != "prod" ]; then \
        if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
            dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
            dbt test -m "fsc_evm,tag:chainhead"; \
            dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
        else \
            dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
            dbt test -m "fsc_evm,tag:chainhead"; \
            dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
        fi; \
    else \
        if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
            dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
            dbt test -m "fsc_evm,tag:chainhead"; \
            dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
        else \
            dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
            dbt test -m "fsc_evm,tag:chainhead"; \
            dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
        fi; \
    fi; \
    echo "# wait ~10 minutes"; \
    echo "# run deploy_chain_phase_2"

deploy_chain_phase_2:
    @set -e; \
    if [ "$(DBT_TARGET)" != "prod" ]; then \
        dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
    else \
        dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
    fi; \
    echo "# wait ~10 minutes"; \
    echo "# run deploy_chain_phase_3"

deploy_chain_phase_3:
    @set -e; \
    if [ "$(DBT_TARGET)" != "prod" ]; then \
        dbt run -m "fsc_evm,tag:phase_2" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
    else \
        dbt run -m "fsc_evm,tag:phase_2" -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
    fi; \
    echo "# wait ~10 minutes"; \
    echo "# run deploy_chain_phase_4"

deploy_chain_phase_4:
    @set -e; \
    if [ "$(DBT_TARGET)" != "prod" ]; then \
        dbt run -m "fsc_evm,tag:phase_3" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
    else \
        dbt run -m "fsc_evm,tag:phase_3" -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
        make deploy_gha_tasks DBT_TARGET=$(DBT_TARGET); \
    fi; \

.PHONY: cleanup_time deploy_gha_workflows_table deploy_gha_tasks deploy_new_gha_tasks deploy_livequery deploy_chain_phase_1 deploy_chain_phase_2 deploy_chain_phase_3 deploy_chain_phase_4
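A sketch of how the phased targets above might be driven end to end for a dev deployment; the variable values are illustrative, and a working dbt profile is assumed:

    make cleanup_time
    make deploy_livequery DBT_TARGET=dev
    make deploy_chain_phase_1 DBT_TARGET=dev RECEIPTS_BY_HASH_ENABLED=false
    # wait ~10 minutes between phases, per the echoed reminders in each target
    make deploy_chain_phase_2 DBT_TARGET=dev
    make deploy_chain_phase_3 DBT_TARGET=dev
    make deploy_chain_phase_4 DBT_TARGET=dev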
@@ -1,8 +0,0 @@
workflow_name,workflow_schedule
dbt_run_scheduled_non_realtime,"32 * * * *"
dbt_run_streamline_chainhead,"20,50 * * * *"
dbt_run_streamline_decoder,"40 * * * *"
dbt_run_scheduled_curated,"10 */6 * * *"
dbt_test_intraday,"8 */4 * * *"
dbt_run_streamline_decoded_logs_history,"5 20 * * 6"
dbt_run_abi_refresh,"41 23 * * *"
data/observability__exclusion_list.csv (new file, 11 lines)
@@ -0,0 +1,11 @@
block_number
985
19022
45036
123322
123542
1133328
1135391
1144468
1244152
1272994
135
dbt_project.yml
@@ -17,6 +17,8 @@ test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
docs-paths:
  ["dbt_packages/fsc_evm/doc_descriptions", "models/doc_descriptions", "models"]

target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
@@ -32,7 +34,7 @@ on-run-start:
  - "{{ create_udfs() }}"

on-run-end:
  - '{{ apply_meta_as_tags(results) }}'
  - "{{ apply_meta_as_tags(results) }}"

dispatch:
  - macro_namespace: dbt
@@ -42,18 +44,66 @@ dispatch:
      - dbt

query-comment:
  comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}'
  comment: "{{ dbt_snowflake_query_tags.get_query_comment(node) }}"
  append: true # Snowflake removes prefixed comments.

# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models

models:
  +copy_grants: true
  +persist_docs:
    relation: true
    columns: true
  +on_schema_change: "append_new_columns"
  optimism_models: # replace with the name of the chain
    +copy_grants: true
    +persist_docs:
      relation: true
      columns: true
    +on_schema_change: "append_new_columns"
  livequery_models:
    +materialized: ephemeral
  fsc_evm:
    +enabled: false # disable fsc_evm package by default
    +copy_grants: true
    +persist_docs:
      relation: true
      columns: true
    +on_schema_change: "append_new_columns"
    main_package:
      +enabled: false # disable top-level package by default, enable subpackages as needed
      admin:
        +enabled: true
      core:
        +enabled: true # enable subpackages, as needed
        bronze:
          +enabled: false
        token_reads:
          +enabled: true
      github_actions:
        +enabled: true
      labels:
        +enabled: true
      observability:
        +enabled: true
      prices:
        +enabled: true
      utils:
        +enabled: true
    decoder_package:
      +enabled: false
      abis:
        +enabled: true
      decoded_logs:
        +enabled: false
        gold:
          +enabled: true
        silver:
          +enabled: true
        streamline:
          +enabled: true
    curated_package:
      +enabled: false
      stats:
        +enabled: true
    scores_package:
      +enabled: false

# In this example config, we tell dbt to build all models in the example/ directory
# as tables. These settings can be overridden in the individual model files
@@ -61,25 +111,22 @@ models:

vars:
  "dbt_date:time_zone": GMT
  STREAMLINE_INVOKE_STREAMS: False
  UPDATE_UDFS_AND_SPS: False
  UPDATE_SNOWFLAKE_TAGS: True
  STREAMLINE_INVOKE_STREAMS: False
  STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
  WAIT: 0
  OBSERV_FULL_TEST: False
  OVERFLOWED_TRACES: False
  HEAL_MODEL: False
  HEAL_MODELS: []
  START_GHA_TASKS: False
  STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False

  #### STREAMLINE 2.0 BEGIN ####

  API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
  EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
  ROLES: |
    ["INTERNAL_DEV"]

  config:
    # The keys correspond to dbt profiles and are case sensitive
    dev:
      API_INTEGRATION: AWS_OPTIMISM_API_STG_V2
      EXTERNAL_FUNCTION_URI: 0h08ox8fa4.execute-api.us-east-1.amazonaws.com/stg/
@@ -94,62 +141,4 @@ vars:
      - AWS_LAMBDA_OPTIMISM_API
      - INTERNAL_DEV
      - DBT_CLOUD_OPTIMISM

  #### STREAMLINE 2.0 END ####

  #### FSC_EVM BEGIN ####
  # Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables

  ### GLOBAL VARIABLES BEGIN ###
  ## REQUIRED
  GLOBAL_PROD_DB_NAME: 'optimism'
  GLOBAL_NODE_SECRET_PATH: 'Vault/prod/optimism/quicknode/mainnet'
  GLOBAL_NODE_URL: '{service}/{Authentication}'
  GLOBAL_BLOCKS_PER_HOUR: 1800
  GLOBAL_USES_STREAMLINE_V1: True
  GLOBAL_USES_SINGLE_FLIGHT_METHOD: True

  ### GLOBAL VARIABLES END ###

  ### MAIN_PACKAGE VARIABLES BEGIN ###

  ### CORE ###
  ## REQUIRED

  ## OPTIONAL
  # GOLD_FULL_REFRESH: True
  # SILVER_FULL_REFRESH: True
  # BRONZE_FULL_REFRESH: True

  # BLOCKS_COMPLETE_FULL_REFRESH: True
  # CONFIRM_BLOCKS_COMPLETE_FULL_REFRESH: True
  # TRACES_COMPLETE_FULL_REFRESH: True
  # RECEIPTS_COMPLETE_FULL_REFRESH: True
  # TRANSACTIONS_COMPLETE_FULL_REFRESH: True

  # BLOCKS_TRANSACTIONS_REALTIME_TESTING_LIMIT: 3
  # BLOCKS_TRANSACTIONS_HISTORY_TESTING_LIMIT: 3
  # TRACES_REALTIME_TESTING_LIMIT: 3
  # TRACES_HISTORY_TESTING_LIMIT: 3
  # ARBTRACE_BLOCK_HISTORY_TESTING_LIMIT: 3
  # RECEIPTS_REALTIME_TESTING_LIMIT: 3
  # RECEIPTS_HISTORY_TESTING_LIMIT: 3
  # CONFIRM_BLOCKS_REALTIME_TESTING_LIMIT: 3
  # CONFIRM_BLOCKS_HISTORY_TESTING_LIMIT: 3

  # ### MAIN_PACKAGE VARIABLES END ###

  # ### DECODER_PACKAGE VARIABLES BEGIN ###

  # ## REQUIRED

  # ## OPTIONAL

  # DECODED_LOGS_COMPLETE_FULL_REFRESH: True

  # DECODED_LOGS_REALTIME_TESTING_LIMIT: 3
  # DECODED_LOGS_HISTORY_SQL_LIMIT: 1 # limit per monthly range

  ### DECODER_PACKAGE VARIABLES END ###

  #### FSC_EVM END ####
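The API_INTEGRATION and EXTERNAL_FUNCTION_URI vars above resolve per dbt target, falling back to the dev block when the active profile has no matching key under config. A sketch of the lookup they encode (variable names here are illustrative):

    {% set cfg = var("config") %}
    {% set api_integration = cfg[target.name]["API_INTEGRATION"]
        if cfg.get(target.name) else cfg["dev"]["API_INTEGRATION"] %}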
@@ -1,18 +1,9 @@
{% macro create_udfs() %}
    {% if var("UPDATE_UDFS_AND_SPS") %}
    {% if var("UPDATE_UDFS_AND_SPS", false) %}
        {% set sql %}
        CREATE schema if NOT EXISTS silver;

        {{ create_udtf_get_base_table(
            schema = "streamline"
        ) }}
        {{ create_udf_get_chainhead() }}
        {{ create_udf_bulk_json_rpc() }}
        {{ create_udf_bulk_decode_logs() }}
        {{ create_udf_bulk_get_traces() }}

        {% endset %}
        {% do run_query(sql) %}
        {{- fsc_utils.create_udfs() -}}
    {% endif %}
{% endmacro %}
{% endmacro %}
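After this change the flag is read with a false default and the UDF DDL is delegated to fsc_utils.create_udfs(), so nothing runs on an ordinary build; the hook only fires when the var is passed explicitly, e.g. dbt run --vars '{"UPDATE_UDFS_AND_SPS": true}' (example invocation only; any selector works since the macro is wired into on-run-start).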
@@ -1,22 +0,0 @@
{% macro standard_predicate(
    input_column = 'block_number'
) -%}
{%- set database_name = target.database -%}
{%- set schema_name = generate_schema_name(
    node = model
) -%}
{%- set table_name = generate_alias_name(
    node = model
) -%}
{%- set tmp_table_name = table_name ~ '__dbt_tmp' -%}
{%- set full_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ table_name -%}
{%- set full_tmp_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ tmp_table_name -%}
{{ full_table_name }}.{{ input_column }} >= (
    SELECT
        MIN(
            {{ input_column }}
        )
    FROM
        {{ full_tmp_table_name }}
)
{%- endmacro %}
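The removed standard_predicate macro rendered a pruning predicate comparing the target table against the incremental __dbt_tmp table. It was presumably attached through a model's incremental_predicates config, roughly like the sketch below (this wiring is inferred, not shown in the diff):

    {{ config(
        materialized = 'incremental',
        incremental_predicates = [standard_predicate('block_number')]
    ) }}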
@@ -1,124 +0,0 @@
{% macro decoded_logs_history(backfill_mode=false) %}

    {%- set params = {
        "sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 8000000),
        "producer_batch_size": var("DECODED_LOGS_HISTORY_PRODUCER_BATCH_SIZE", 400000),
        "worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000)
    } -%}

    {% set wait_time = var("DECODED_LOGS_HISTORY_WAIT_TIME", 60) %}
    {% set find_months_query %}
        SELECT
            DISTINCT date_trunc('month', block_timestamp)::date as month
        FROM {{ ref('core__fact_blocks') }}
        ORDER BY month ASC
    {% endset %}
    {% set results = run_query(find_months_query) %}

    {% if execute %}
        {% set months = results.columns[0].values() %}

        {% for month in months %}
            {% set view_name = 'decoded_logs_history_' ~ month.strftime('%Y_%m') %}

            {% set create_view_query %}
                create or replace view streamline.{{view_name}} as (
                    WITH target_blocks AS (
                        SELECT
                            block_number
                        FROM {{ ref('core__fact_blocks') }}
                        WHERE date_trunc('month', block_timestamp) = '{{month}}'::timestamp
                    ),
                    new_abis AS (
                        SELECT
                            abi,
                            parent_contract_address,
                            event_signature,
                            start_block,
                            end_block
                        FROM {{ ref('silver__complete_event_abis') }}
                        {% if not backfill_mode %}
                        WHERE inserted_timestamp > dateadd('day', -30, sysdate())
                        {% endif %}
                    ),
                    existing_logs_to_exclude AS (
                        SELECT _log_id
                        FROM {{ ref('streamline__decoded_logs_complete') }} l
                        INNER JOIN target_blocks b using (block_number)
                    ),
                    candidate_logs AS (
                        SELECT
                            l.block_number,
                            l.tx_hash,
                            l.event_index,
                            l.contract_address,
                            l.topics,
                            l.data,
                            concat(l.tx_hash::string, '-', l.event_index::string) as _log_id
                        FROM target_blocks b
                        INNER JOIN {{ ref('core__fact_event_logs') }} l using (block_number)
                        WHERE l.tx_status = 'SUCCESS' and date_trunc('month', l.block_timestamp) = '{{month}}'::timestamp
                    )
                    SELECT
                        l.block_number,
                        l._log_id,
                        A.abi,
                        OBJECT_CONSTRUCT(
                            'topics', l.topics,
                            'data', l.data,
                            'address', l.contract_address
                        ) AS data
                    FROM candidate_logs l
                    INNER JOIN new_abis A
                        ON A.parent_contract_address = l.contract_address
                        AND A.event_signature = l.topics[0]::STRING
                        AND l.block_number BETWEEN A.start_block AND A.end_block
                    WHERE NOT EXISTS (
                        SELECT 1
                        FROM existing_logs_to_exclude e
                        WHERE e._log_id = l._log_id
                    )
                    LIMIT {{ params.sql_limit }}
                )
            {% endset %}
            {# Create the view #}
            {% do run_query(create_view_query) %}
            {{ log("Created view for month " ~ month.strftime('%Y-%m'), info=True) }}
            {% if var("STREAMLINE_INVOKE_STREAMS", false) %}
                {# Check if rows exist first #}
                {% set check_rows_query %}
                    SELECT EXISTS(SELECT 1 FROM streamline.{{view_name}} LIMIT 1)
                {% endset %}

                {% set results = run_query(check_rows_query) %}
                {% set has_rows = results.columns[0].values()[0] %}

                {% if has_rows %}
                    {# Invoke streamline, if rows exist to decode #}
                    {% set decode_query %}
                        SELECT
                            streamline.udf_bulk_decode_logs_v2(
                                PARSE_JSON(
                                    $${ "external_table": "decoded_logs",
                                    "producer_batch_size": {{ params.producer_batch_size }},
                                    "sql_limit": {{ params.sql_limit }},
                                    "sql_source": "{{view_name}}",
                                    "worker_batch_size": {{ params.worker_batch_size }} }$$
                                )
                            );
                    {% endset %}

                    {% do run_query(decode_query) %}
                    {{ log("Triggered decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
                    {# Call wait since we actually did some decoding #}
                    {% do run_query("call system$wait(" ~ wait_time ~ ")") %}
                    {{ log("Completed wait after decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
                {% else %}
                    {{ log("No rows to decode for month " ~ month.strftime('%Y-%m'), info=True) }}
                {% endif %}
            {% endif %}

        {% endfor %}
    {% endif %}

{% endmacro %}
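decoded_logs_history was built to be invoked ad hoc, e.g. dbt run-operation decoded_logs_history --args '{backfill_mode: true}' (an assumed invocation; the macro name and argument are real, the call site is not in the diff). For each month it creates the view, submits it for decoding only when rows exist, then waits DECODED_LOGS_HISTORY_WAIT_TIME seconds before moving on.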
@@ -1,29 +0,0 @@
{% macro run_decoded_logs_history() %}

    {% set blockchain = var('GLOBAL_PROD_DB_NAME','').lower() %}

    {% set check_for_new_user_abis_query %}
        select 1
        from {{ ref('silver__user_verified_abis') }}
        where _inserted_timestamp::date = sysdate()::date
        and dayname(sysdate()) <> 'Sat'
    {% endset %}
    {% set results = run_query(check_for_new_user_abis_query) %}
    {% if execute %}
        {% set new_user_abis = results.columns[0].values()[0] %}

        {% if new_user_abis %}
            {% set invoke_workflow_query %}
                SELECT
                    github_actions.workflow_dispatches(
                        'FlipsideCrypto',
                        '{{ blockchain }}' || '-models',
                        'dbt_run_streamline_decoded_logs_history.yml',
                        NULL
                    )
            {% endset %}

            {% do run_query(invoke_workflow_query) %}
        {% endif %}
    {% endif %}
{% endmacro %}
@@ -1,101 +0,0 @@
{% macro streamline_external_table_query_decoder(
    source_name,
    source_version
) %}

    {% if source_version != '' %}
        {% set source_version = '_' ~ source_version.lower() %}
    {% endif %}

    WITH meta AS (
        SELECT
            job_created_time AS _inserted_timestamp,
            file_name,
            CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
            TO_DATE(
                concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
            ) AS _partition_by_created_date
        FROM
            TABLE(
                information_schema.external_table_file_registration_history(
                    start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
                    table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
            ) A
    )
    SELECT
        block_number,
        id :: STRING AS id,
        DATA,
        metadata,
        b.file_name,
        _inserted_timestamp,
        s._partition_by_block_number AS _partition_by_block_number,
        s._partition_by_created_date AS _partition_by_created_date
    FROM
        {{ source(
            "bronze_streamline",
            source_name ~ source_version
        ) }}
        s
        JOIN meta b
        ON b.file_name = metadata$filename
        AND b._partition_by_block_number = s._partition_by_block_number
        AND b._partition_by_created_date = s._partition_by_created_date
    WHERE
        b._partition_by_block_number = s._partition_by_block_number
        AND b._partition_by_created_date = s._partition_by_created_date
        AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP())
        AND DATA :error IS NULL
        AND DATA IS NOT NULL
{% endmacro %}


{% macro streamline_external_table_query_decoder_fr(
    source_name,
    source_version
) %}

    {% if source_version != '' %}
        {% set source_version = '_' ~ source_version.lower() %}
    {% endif %}

    WITH meta AS (
        SELECT
            registered_on AS _inserted_timestamp,
            file_name,
            CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
            TO_DATE(
                concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
            ) AS _partition_by_created_date
        FROM
            TABLE(
                information_schema.external_table_files(
                    table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
                )
            ) A
    )
    SELECT
        block_number,
        id :: STRING AS id,
        DATA,
        metadata,
        b.file_name,
        _inserted_timestamp,
        s._partition_by_block_number AS _partition_by_block_number,
        s._partition_by_created_date AS _partition_by_created_date
    FROM
        {{ source(
            "bronze_streamline",
            source_name ~ source_version
        ) }}
        s
        JOIN meta b
        ON b.file_name = metadata$filename
        AND b._partition_by_block_number = s._partition_by_block_number
        AND b._partition_by_created_date = s._partition_by_created_date
    WHERE
        b._partition_by_block_number = s._partition_by_block_number
        AND b._partition_by_created_date = s._partition_by_created_date
        AND DATA :error IS NULL
        AND DATA IS NOT NULL
{% endmacro %}
@@ -1,141 +0,0 @@
{% macro streamline_external_table_query(
    source_name,
    source_version,
    partition_function,
    balances,
    block_number,
    uses_receipts_by_hash
) %}

    {% if source_version != '' %}
        {% set source_version = '_' ~ source_version.lower() %}
    {% endif %}

    WITH meta AS (
        SELECT
            job_created_time AS _inserted_timestamp,
            file_name,
            {{ partition_function }} AS partition_key
        FROM
            TABLE(
                information_schema.external_table_file_registration_history(
                    start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
                    table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
            ) A
    )
    SELECT
        s.*,
        b.file_name,
        b._inserted_timestamp

        {% if balances %},
        r.block_timestamp :: TIMESTAMP AS block_timestamp
        {% endif %}

        {% if block_number %},
        COALESCE(
            s.value :"BLOCK_NUMBER" :: STRING,
            s.metadata :request :"data" :id :: STRING,
            PARSE_JSON(
                s.metadata :request :"data"
            ) :id :: STRING
        ) :: INT AS block_number
        {% endif %}
        {% if uses_receipts_by_hash %},
        s.value :"TX_HASH" :: STRING AS tx_hash
        {% endif %}
    FROM
        {{ source(
            "bronze_streamline",
            source_name ~ source_version
        ) }}
        s
        JOIN meta b
        ON b.file_name = metadata$filename
        AND b.partition_key = s.partition_key

        {% if balances %}
        JOIN {{ ref('_block_ranges') }}
        r
        ON r.block_number = COALESCE(
            s.value :"BLOCK_NUMBER" :: INT,
            s.value :"block_number" :: INT
        )
        {% endif %}
    WHERE
        b.partition_key = s.partition_key
        AND DATA :error IS NULL
        AND DATA IS NOT NULL
{% endmacro %}

{% macro streamline_external_table_query_fr(
    source_name,
    source_version,
    partition_function,
    partition_join_key,
    balances,
    block_number,
    uses_receipts_by_hash
) %}

    {% if source_version != '' %}
        {% set source_version = '_' ~ source_version.lower() %}
    {% endif %}

    WITH meta AS (
        SELECT
            registered_on AS _inserted_timestamp,
            file_name,
            {{ partition_function }} AS partition_key
        FROM
            TABLE(
                information_schema.external_table_files(
                    table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
                )
            ) A
    )
    SELECT
        s.*,
        b.file_name,
        b._inserted_timestamp

        {% if balances %},
        r.block_timestamp :: TIMESTAMP AS block_timestamp
        {% endif %}

        {% if block_number %},
        COALESCE(
            s.value :"BLOCK_NUMBER" :: STRING,
            s.value :"block_number" :: STRING,
            s.metadata :request :"data" :id :: STRING,
            PARSE_JSON(
                s.metadata :request :"data"
            ) :id :: STRING
        ) :: INT AS block_number
        {% endif %}
        {% if uses_receipts_by_hash %},
        s.value :"TX_HASH" :: STRING AS tx_hash
        {% endif %}
    FROM
        {{ source(
            "bronze_streamline",
            source_name ~ source_version
        ) }}
        s
        JOIN meta b
        ON b.file_name = metadata$filename
        AND b.partition_key = s.{{ partition_join_key }}

        {% if balances %}
        JOIN {{ ref('_block_ranges') }}
        r
        ON r.block_number = COALESCE(
            s.value :"BLOCK_NUMBER" :: INT,
            s.value :"block_number" :: INT
        )
        {% endif %}
    WHERE
        b.partition_key = s.{{ partition_join_key }}
        AND DATA :error IS NULL
        AND DATA IS NOT NULL
{% endmacro %}
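A bronze model would use one of these macros as its entire body. A sketch with illustrative argument values (only the signature comes from the macro above; the partition function shown is the project default that appears elsewhere in this PR):

    {{ streamline_external_table_query_fr(
        source_name = 'traces',
        source_version = '',
        partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)",
        partition_join_key = 'partition_key',
        balances = false,
        block_number = true,
        uses_receipts_by_hash = false
    ) }}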
@@ -1,36 +0,0 @@
{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %}

    {% if source_version != '' %}
        {% set source_version = '_' ~ source_version.lower() %}
    {% endif %}
    {% if model_type != '' %}
        {% set model_type = '_' ~ model_type %}
    {% endif %}

    {%- if flags.WHICH == 'compile' and execute -%}

        {{ log("=== Current Variable Settings ===", info=True) }}
        {{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }}
        {{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }}
        {{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }}
        {% if uses_receipts_by_hash %}
            {{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
        {% endif %}

        {{ log("", info=True) }}
        {{ log("=== Source Details ===", info=True) }}
        {{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }}
        {{ log("", info=True) }}

        {% set config_log = '\n' %}
        {% set config_log = config_log ~ '\n=== DBT Model Config ===\n' %}
        {% set config_log = config_log ~ '\n{{ config (\n' %}
        {% set config_log = config_log ~ '    materialized = "' ~ config.get('materialized') ~ '",\n' %}
        {% set config_log = config_log ~ '    tags = ' ~ config.get('tags') | tojson ~ '\n' %}
        {% set config_log = config_log ~ ') }}\n' %}
        {{ log(config_log, info=True) }}
        {{ log("", info=True) }}

    {%- endif -%}

{% endmacro %}
@@ -1,29 +0,0 @@
{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %}

    {%- if flags.WHICH == 'compile' and execute -%}

        {% if uses_receipts_by_hash %}

            {{ log("=== Current Variable Settings ===", info=True) }}
            {{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}

        {% endif %}

        {% set config_log = '\n' %}
        {% set config_log = config_log ~ '\n=== DBT Model Config ===\n' %}
        {% set config_log = config_log ~ '\n{{ config (\n' %}
        {% set config_log = config_log ~ '    materialized = "' ~ config.get('materialized') ~ '",\n' %}
        {% set config_log = config_log ~ '    unique_key = "' ~ config.get('unique_key') ~ '",\n' %}
        {% set config_log = config_log ~ '    cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %}
        {% set config_log = config_log ~ '    merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %}
        {% set config_log = config_log ~ '    post_hook = "' ~ post_hook ~ '",\n' %}
        {% set config_log = config_log ~ '    incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %}
        {% set config_log = config_log ~ '    full_refresh = ' ~ full_refresh_type ~ ',\n' %}
        {% set config_log = config_log ~ '    tags = ' ~ config.get('tags') | tojson ~ '\n' %}
        {% set config_log = config_log ~ ') }}\n' %}
        {{ log(config_log, info=True) }}
        {{ log("", info=True) }}

    {%- endif -%}

{% endmacro %}
@@ -1,36 +0,0 @@
{% macro log_model_details(vars=false, params=false) %}

    {%- if execute -%}
    /*
    DBT Model Config:
    {{ model.config | tojson(indent=2) }}
    */

    {% if vars is not false %}

        {% if var('LOG_MODEL_DETAILS', false) %}
            {{ log( vars | tojson(indent=2), info=True) }}
        {% endif %}
    /*
    Variables:
    {{ vars | tojson(indent=2) }}
    */
    {% endif %}

    {% if params is not false %}

        {% if var('LOG_MODEL_DETAILS', false) %}
            {{ log( params | tojson(indent=2), info=True) }}
        {% endif %}
    /*
    Parameters:
    {{ params | tojson(indent=2) }}
    */
    {% endif %}

    /*
    Raw Code:
    {{ model.raw_code }}
    */
    {%- endif -%}
{% endmacro %}
@@ -1,55 +0,0 @@
{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %}

    {%- if flags.WHICH == 'compile' and execute -%}

        {{ log("=== Current Variable Settings ===", info=True) }}
        {{ log("START_UP_BLOCK: " ~ min_block, info=True) }}
        {{ log("", info=True) }}

        {{ log("=== API Details ===", info=True) }}

        {{ log("NODE_URL: " ~ node_url, info=True) }}
        {{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
        {{ log("", info=True) }}

        {{ log("=== Current Variable Settings ===", info=True) }}

        {{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }}
        {{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }}
        {{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }}
        {{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }}
        {{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }}
        {{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }}
        {{ log("", info=True) }}

        {{ log("=== RPC Details ===", info=True) }}

        {{ log(model_name ~ ": {", info=True) }}
        {{ log("    method: '" ~ method ~ "',", info=True) }}
        {{ log("    method_params: " ~ method_params, info=True) }}
        {{ log("}", info=True) }}
        {{ log("", info=True) }}

        {% set params_str = streamline_params | tojson %}
        {% set params_formatted = params_str | replace('{', '{\n    ') | replace('}', '\n    }') | replace(', ', ',\n    ') %}

        {# Clean up the method_params formatting #}
        {% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %}

        {% set config_log = '\n' %}
        {% set config_log = config_log ~ '\n=== DBT Model Config ===\n' %}
        {% set config_log = config_log ~ '\n{{ config (\n' %}
        {% set config_log = config_log ~ '    materialized = "' ~ config.get('materialized') ~ '",\n' %}
        {% set config_log = config_log ~ '    post_hook = fsc_utils.if_data_call_function_v2(\n' %}
        {% set config_log = config_log ~ '        func = "streamline.udf_bulk_rest_api_v2",\n' %}
        {% set config_log = config_log ~ '        target = "' ~ this.schema ~ '.' ~ this.identifier ~ '",\n' %}
        {% set config_log = config_log ~ '        params = ' ~ params_formatted ~ '\n' %}
        {% set config_log = config_log ~ '    ),\n' %}
        {% set config_log = config_log ~ '    tags = ' ~ config.get('tags') | tojson ~ '\n' %}
        {% set config_log = config_log ~ ') }}\n' %}
        {{ log(config_log, info=True) }}
        {{ log("", info=True) }}

    {%- endif -%}

{% endmacro %}
@@ -1,47 +0,0 @@
{% macro set_default_variables_streamline(model_name, model_type) %}

    {%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
    {%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%}
    {%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%}
    {%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%}
    {%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%}
    {%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime'
        else 'ORDER BY partition_key ASC, block_number ASC' -%}
    {%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%}
    {%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}

    {%- set variables = {
        'node_url': node_url,
        'node_secret_path': node_secret_path,
        'model_quantum_state': model_quantum_state,
        'testing_limit': testing_limit,
        'new_build': new_build,
        'order_by_clause': order_by_clause,
        'uses_receipts_by_hash': uses_receipts_by_hash
    } -%}

    {{ return(variables) }}

{% endmacro %}

{% macro set_default_variables_bronze(source_name, model_type) %}

    {%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION',
        "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)")
    -%}
    {%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%}
    {%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%}
    {%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%}
    {%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}

    {%- set variables = {
        'partition_function': partition_function,
        'partition_join_key': partition_join_key,
        'block_number': block_number,
        'balances': balances,
        'uses_receipts_by_hash': uses_receipts_by_hash
    } -%}

    {{ return(variables) }}

{% endmacro %}
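Callers take the returned dict and index into it; every entry can be overridden by a var named after the model and type. A sketch with an illustrative model name:

    {%- set vars = set_default_variables_streamline('blocks_transactions', 'realtime') -%}
    {# vars['model_quantum_state'] is 'streamline' unless
       BLOCKS_TRANSACTIONS_REALTIME_QUANTUM_STATE is set in dbt_project.yml #}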
@@ -1,57 +0,0 @@
{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %}

    {%- set rpc_config_details = {
        "blocks_transactions": {
            "method": 'eth_getBlockByNumber',
            "method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)',
            "exploded_key": ['result', 'result.transactions']
        },
        "receipts_by_hash": {
            "method": 'eth_getTransactionReceipt',
            "method_params": 'ARRAY_CONSTRUCT(tx_hash)'
        },
        "receipts": {
            "method": 'eth_getBlockReceipts',
            "method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))',
            "exploded_key": ['result'],
            "lambdas": 2
        },
        "traces": {
            "method": 'debug_traceBlockByNumber',
            "method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))",
            "exploded_key": ['result'],
            "lambdas": 2
        },
        "confirm_blocks": {
            "method": 'eth_getBlockByNumber',
            "method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)'
        }
    } -%}

    {%- set rpc_config = rpc_config_details[model_name.lower()] -%}

    {%- set params = {
        "external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()),
        "sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
        "producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
        "worker_batch_size": var(
            (model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(),
            (2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1))
        ),
        "sql_source": (model_name ~ '_' ~ model_type).lower(),
        "method": rpc_config['method'],
        "method_params": rpc_config['method_params']
    } -%}

    {%- if rpc_config.get('exploded_key') is not none -%}
        {%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%}
    {%- endif -%}

    {%- if rpc_config.get('lambdas') is not none -%}
        {%- do params.update({"lambdas": rpc_config['lambdas']}) -%}
    {%- endif -%}

    {{ return(params) }}

{% endmacro %}
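With this project's GLOBAL_BLOCKS_PER_HOUR of 1800 and the default multiplier of 1, a call like set_streamline_parameters('blocks_transactions', 'realtime') works out as sketched below (hedged arithmetic, not captured output):

    {# sql_limit = producer_batch_size = 2 * 1800 * 1 = 3600;
       worker_batch_size = 3600 // 1 lambda = 3600;
       method = 'eth_getBlockByNumber';
       sql_source = 'blocks_transactions_realtime' #}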
@@ -1,25 +0,0 @@
{% macro create_aws_optimism_api() %}
    {{ log("Creating integration for target:" ~ target) }}
    {% if target.name == "prod" %}
        {% set sql %}
        CREATE api integration IF NOT EXISTS aws_optimism_api_prod api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-optimism' api_allowed_prefixes = (
            'https://s7qxto6wkd.execute-api.us-east-1.amazonaws.com/prod/'
        ) enabled = TRUE;
        {% endset %}
        {% do run_query(sql) %}
    {% elif target.name == "dev" %}
        {% set sql %}
        CREATE api integration IF NOT EXISTS aws_optimism_api_dev api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-optimism' api_allowed_prefixes = (
            'https://ngiz4ozok1.execute-api.us-east-1.amazonaws.com/dev/'
        ) enabled = TRUE;
        {% endset %}
        {% do run_query(sql) %}
    {% elif target.name == "sbx" %}
        {% set sql %}
        CREATE api integration IF NOT EXISTS aws_optimism_api_sbx_shah api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::579011195466:role/snowflake-api-optimism' api_allowed_prefixes = (
            'https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/'
        ) enabled = TRUE;
        {% endset %}
        {% do run_query(sql) %}
    {% endif %}
{% endmacro %}
@@ -1,23 +0,0 @@
{% macro create_udtf_get_base_table(schema) %}
create or replace function {{ schema }}.udtf_get_base_table(max_height integer)
returns table (height number)
as
$$
    with base as (
        select
            row_number() over (
                order by
                    seq4()
            ) as id
        from
            table(generator(rowcount => 1000000000))
    )
    select
        id as height
    from
        base
    where
        id <= max_height
$$
;
{% endmacro %}
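Usage is a plain Snowflake table-function call; this returns the integers 1 through max_height as block heights:

    SELECT height
    FROM TABLE(streamline.udtf_get_base_table(1000));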
@@ -1,42 +0,0 @@
{% macro create_udf_get_chainhead() %}
    CREATE
    OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead() returns variant api_integration =
    {% if target.name == "prod" %}
        aws_optimism_api AS 'https://s7qxto6wkd.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
    {% else %}
        aws_optimism_api_dev AS 'https://ngiz4ozok1.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
    {%- endif %};
{% endmacro %}

{% macro create_udf_bulk_json_rpc() %}
    CREATE
    OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(
        json variant
    ) returns text api_integration = {% if target.name == "prod" %}
        aws_optimism_api AS 'https://s7qxto6wkd.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_json_rpc'
    {% else %}
        aws_optimism_api_dev AS 'https://ngiz4ozok1.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_json_rpc'
    {%- endif %};
{% endmacro %}

{% macro create_udf_bulk_decode_logs() %}
    CREATE
    OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_decode_logs(
        json OBJECT
    ) returns ARRAY api_integration = {% if target.name == "prod" %}
        aws_optimism_api AS 'https://s7qxto6wkd.execute-api.us-east-1.amazonaws.com/prod/bulk_decode_logs'
    {% else %}
        aws_optimism_api_dev AS 'https://ngiz4ozok1.execute-api.us-east-1.amazonaws.com/dev/bulk_decode_logs'
    {%- endif %};
{% endmacro %}

{% macro create_udf_bulk_get_traces() %}
    CREATE
    OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_get_traces(
        json variant
    ) returns text api_integration = {% if target.name == "prod" %}
        aws_optimism_api AS 'https://s7qxto6wkd.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_get_traces'
    {% else %}
        aws_optimism_api_dev AS 'https://ngiz4ozok1.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_get_traces'
    {%- endif %};
{% endmacro %}
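Once created, the external functions are called like any other Snowflake function, for example (a trivial smoke test, not from the PR):

    SELECT streamline.udf_get_chainhead();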
@@ -1,26 +0,0 @@
{% test missing_decoded_logs(model) %}
SELECT
    l.block_number,
    CONCAT(
        l.tx_hash,
        '-',
        l.event_index
    ) AS _log_id
FROM
    {{ ref('core__fact_event_logs') }}
    l
    LEFT JOIN {{ model }}
    d
    ON l.block_number = d.block_number
    AND CONCAT(
        l.tx_hash,
        '-',
        l.event_index
    ) = d._log_id
WHERE
    l.contract_address = LOWER('0x4200000000000000000000000000000000000006') -- WETH
    AND l.topics [0] :: STRING = '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' -- Transfer
    AND l.block_timestamp BETWEEN DATEADD('hour', -48, SYSDATE())
    AND DATEADD('hour', -6, SYSDATE())
    AND d._log_id IS NULL
{% endtest %}
@@ -1,124 +0,0 @@
{% macro missing_txs(
    model
) %}
WITH txs_base AS (
    SELECT
        block_number AS base_block_number,
        tx_hash AS base_tx_hash
    FROM
        {{ ref('test_silver__transactions_full') }}
),
model_name AS (
    SELECT
        block_number AS model_block_number,
        tx_hash AS model_tx_hash
    FROM
        {{ model }}
)
SELECT
    base_block_number,
    base_tx_hash,
    model_block_number,
    model_tx_hash
FROM
    txs_base
    LEFT JOIN model_name
    ON base_block_number = model_block_number
    AND base_tx_hash = model_tx_hash
WHERE
    (
        model_tx_hash IS NULL
        OR model_block_number IS NULL
    )
    AND base_block_number NOT IN (
        SELECT
            block_number
        FROM
            {{ ref('silver_observability__excluded_receipt_blocks') }}
    )
{% endmacro %}

{% macro recent_missing_txs(
    model
) %}
WITH txs_base AS (
    SELECT
        block_number AS base_block_number,
        tx_hash AS base_tx_hash
    FROM
        {{ ref('test_silver__transactions_recent') }}
),
model_name AS (
    SELECT
        block_number AS model_block_number,
        tx_hash AS model_tx_hash
    FROM
        {{ model }}
)
SELECT
    base_block_number,
    base_tx_hash,
    model_block_number,
    model_tx_hash
FROM
    txs_base
    LEFT JOIN model_name
    ON base_block_number = model_block_number
    AND base_tx_hash = model_tx_hash
WHERE
    (model_tx_hash IS NULL
    OR model_block_number IS NULL)
    AND base_block_number NOT IN (
        SELECT
            block_number
        FROM
            {{ ref('silver_observability__excluded_receipt_blocks') }}
    )
{% endmacro %}

{% macro missing_confirmed_txs(
    model1,
    model2
) %}
WITH txs_base AS (
    SELECT
        block_number AS base_block_number,
        block_hash AS base_block_hash,
        tx_hash AS base_tx_hash
    FROM
        {{ model1 }}
),
model_name AS (
    SELECT
        block_number AS model_block_number,
        block_hash AS model_block_hash,
        tx_hash AS model_tx_hash
    FROM
        {{ model2 }}
)
SELECT
    DISTINCT base_block_number AS block_number
FROM
    txs_base
    LEFT JOIN model_name
    ON base_block_number = model_block_number
    AND base_tx_hash = model_tx_hash
    AND base_block_hash = model_block_hash
WHERE
    model_tx_hash IS NULL
    AND model_block_number <= (
        SELECT
            MAX(base_block_number)
        FROM
            txs_base
    )
    AND base_block_number NOT IN (
        SELECT
            block_number
        FROM
            {{ ref('silver_observability__excluded_receipt_blocks') }}
    )
{% endmacro %}
@@ -1,78 +0,0 @@
{% macro if_data_call_function(
    func,
    target
) %}
    {% if var(
        "STREAMLINE_INVOKE_STREAMS"
    ) %}
        {% if execute %}
            {{ log(
                "Running macro `if_data_call_function`: Calling udf " ~ func ~ " on " ~ target,
                True
            ) }}
        {% endif %}
    SELECT
        {{ func }}
    WHERE
        EXISTS(
            SELECT
                1
            FROM
                {{ target }}
            LIMIT
                1
        )
    {% else %}
        {% if execute %}
            {{ log(
                "Running macro `if_data_call_function`: NOOP",
                False
            ) }}
        {% endif %}
    SELECT
        NULL
    {% endif %}
{% endmacro %}

{% macro if_data_call_wait() %}
    {% if var(
        "STREAMLINE_INVOKE_STREAMS"
    ) %}
        {% set query %}
    SELECT
        1
    WHERE
        EXISTS(
            SELECT
                1
            FROM
                {{ model.schema ~ "." ~ model.alias }}
            LIMIT
                1
        ) {% endset %}
        {% if execute %}
            {% set results = run_query(
                query
            ) %}
            {% if results %}
                {{ log(
                    "Waiting...",
                    info = True
                ) }}

                {% set wait_query %}
    SELECT
        system$wait(
            {{ var(
                "WAIT",
                600
            ) }}
        ) {% endset %}
                {% do run_query(wait_query) %}
            {% else %}
    SELECT
        NULL;
            {% endif %}
        {% endif %}
    {% endif %}
{% endmacro %}
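if_data_call_function was meant to run as a post-hook that fires the UDF only when the freshly built model actually produced rows. A sketch of the wiring (the function call and target names are placeholders):

    {{ config(
        post_hook = if_data_call_function(
            func = "streamline.udf_bulk_json_rpc(object_construct('sql_source', 'my_view'))",
            target = "streamline.my_view"
        )
    ) }}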
@@ -1,88 +0,0 @@
{{ config (
    materialized = "ephemeral"
) }}

WITH retry AS (

    SELECT
        contract_address,
        GREATEST(
            latest_call_block,
            latest_event_block
        ) AS block_number,
        total_interaction_count
    FROM
        {{ ref("silver__relevant_contracts") }}
        r
        LEFT JOIN {{ source(
            'optimism_silver',
            'verified_abis'
        ) }}
        v USING (contract_address)
    WHERE
        r.total_interaction_count >= 250 -- high interaction count
        AND GREATEST(
            max_inserted_timestamp_logs,
            max_inserted_timestamp_traces
        ) >= CURRENT_DATE - INTERVAL '30 days' -- recent activity
        AND v.contract_address IS NULL -- no verified abi
        AND r.contract_address NOT IN (
            SELECT
                contract_address
            FROM
                {{ source(
                    'optimism_bronze_api',
                    'contract_abis'
                ) }}
            WHERE
                _inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
                AND abi_data :data :result :: STRING <> 'Max rate limit reached'
        )

    ORDER BY
        total_interaction_count DESC
    LIMIT
        25
), FINAL AS (
    SELECT
        proxy_address AS contract_address,
        start_block AS block_number
    FROM
        {{ ref("silver__proxies") }}
        p
        JOIN retry r USING (contract_address)
        LEFT JOIN {{ source(
            'optimism_silver',
            'verified_abis'
        ) }}
        v
        ON v.contract_address = p.proxy_address
    WHERE
        v.contract_address IS NULL
        AND p.contract_address NOT IN (
            SELECT
                contract_address
            FROM
                {{ source(
                    'optimism_bronze_api',
                    'contract_abis'
                ) }}
            WHERE
                _inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
                AND abi_data :data :result :: STRING <> 'Max rate limit reached'
        )
    UNION ALL
    SELECT
        contract_address,
        block_number
    FROM
        retry
)
SELECT
    *
FROM
    FINAL qualify ROW_NUMBER() over (
        PARTITION BY contract_address
        ORDER BY
            block_number DESC
    ) = 1
@@ -1,79 +0,0 @@
{{ config(
    materialized = 'incremental',
    unique_key = "contract_address",
    full_refresh = false,
    tags = ['curated']
) }}

WITH base AS (

    SELECT
        contract_address
    FROM
        {{ ref('silver__relevant_contracts') }}
    WHERE
        total_interaction_count >= 100

{% if is_incremental() %}
and contract_address not in (
    SELECT
        contract_address
    FROM
        {{ this }}
    WHERE
        abi_data :data :result :: STRING <> 'Max rate limit reached'
)
{% endif %}
order by total_interaction_count desc
LIMIT
    400
), all_contracts AS (
    SELECT
        contract_address
    FROM
        base
    UNION
    SELECT
        contract_address
    FROM
        {{ ref('_retry_abis') }}
),
row_nos AS (
    SELECT
        contract_address,
        ROW_NUMBER() over (
            ORDER BY
                contract_address
        ) AS row_no
    FROM
        all_contracts
),
batched AS ({% for item in range(501) %}
SELECT
    rn.contract_address,
    live.udf_api(
        'GET',
        CONCAT('https://api-optimistic.etherscan.io/api?module=contract&action=getabi&address=', rn.contract_address, '&apikey={key}'),
        OBJECT_CONSTRUCT(
            'Content-Type', 'application/json',
            'fsc-quantum-state', 'livequery'
        ),
        NULL,
        'Vault/prod/block_explorers/optimism_scan'
    ) AS abi_data,
    SYSDATE() AS _inserted_timestamp
FROM
    row_nos rn
WHERE
    row_no = {{ item }}

{% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %})
SELECT
    contract_address,
    abi_data,
    _inserted_timestamp
FROM
    batched
@@ -1,23 +0,0 @@
version: 2
models:
  - name: bronze_api__contract_abis

    columns:
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 1
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
                - TIMESTAMP_LTZ
      - name: CONTRACT_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - VARCHAR
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: "^(0x)[0-9a-fA-F]{40}$"
@@ -1,130 +0,0 @@
{{ config(
    materialized = 'incremental',
    unique_key = "contract_address",
    full_refresh = false,
    tags = ['non_realtime']
) }}

WITH base AS (

    SELECT
        contract_address,
        latest_event_block AS latest_block
    FROM
        {{ ref('silver__relevant_contracts') }}
    WHERE
        total_event_count >= 25

{% if is_incremental() %}
AND contract_address NOT IN (
    SELECT
        contract_address
    FROM
        {{ this }}
)
{% endif %}
ORDER BY
    total_event_count DESC
LIMIT
    500
), function_sigs AS (
    SELECT
        '0x313ce567' AS function_sig,
        'decimals' AS function_name
    UNION
    SELECT
        '0x06fdde03',
        'name'
    UNION
    SELECT
        '0x95d89b41',
        'symbol'
),
all_reads AS (
    SELECT
        *
    FROM
        base
        JOIN function_sigs
        ON 1 = 1
),
ready_reads AS (
    SELECT
        contract_address,
        latest_block,
        function_sig,
        RPAD(
            function_sig,
            64,
            '0'
        ) AS input,
        utils.udf_json_rpc_call(
            'eth_call',
            [{'to': contract_address, 'from': null, 'data': input}, utils.udf_int_to_hex(latest_block)],
            concat_ws(
                '-',
                contract_address,
                input,
                latest_block
            )
        ) AS rpc_request
    FROM
        all_reads
),
batch_reads AS (
    SELECT
        ARRAY_AGG(rpc_request) AS batch_rpc_request
    FROM
        ready_reads
),
node_call AS (
    SELECT
        *,
        live.udf_api(
            'POST',
            CONCAT(
                '{service}',
                '/',
                '{Authentication}'
            ),{},
            batch_rpc_request,
            'Vault/prod/optimism/quicknode/mainnet'
        ) AS response
    FROM
        batch_reads
    WHERE
        EXISTS (
            SELECT
                1
            FROM
                ready_reads
            LIMIT
                1
        )
), flat_responses AS (
    SELECT
        VALUE :id :: STRING AS call_id,
        VALUE :result :: STRING AS read_result
    FROM
        node_call,
        LATERAL FLATTEN (
            input => response :data
        )
)
SELECT
    SPLIT_PART(
        call_id,
        '-',
        1
    ) AS contract_address,
    SPLIT_PART(
        call_id,
        '-',
        3
    ) AS block_number,
    LEFT(SPLIT_PART(call_id, '-', 2), 10) AS function_sig,
    NULL AS function_input,
    read_result,
    SYSDATE() :: TIMESTAMP AS _inserted_timestamp
FROM
    flat_responses
@@ -1,19 +0,0 @@
version: 2
models:
  - name: bronze_api__token_reads
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - CONTRACT_ADDRESS
            - FUNCTION_SIG
    columns:
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 3
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
                - TIMESTAMP_LTZ
@@ -2,7 +2,7 @@
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "state_block_number",
    tags = ['ethereum','non_realtime']
    tags = ['bronze','ethereum']
) }}

SELECT
@@ -40,4 +40,4 @@ WHERE
    FROM
        {{ this }}
)
{% endif %}
{% endif %}
@@ -2,7 +2,7 @@
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "l1_submission_block_number",
    tags = ['ethereum','non_realtime']
    tags = ['bronze','ethereum']
) }}

SELECT
@@ -38,4 +38,4 @@ WHERE
    FROM
        {{ this }}
)
{% endif %}
{% endif %}
@@ -1,25 +0,0 @@
{{ config(
    materialized = 'view'
) }}

SELECT
    system_created_at,
    insert_date,
    blockchain,
    address,
    creator,
    label_type,
    label_subtype,
    address_name,
    project_name,
    _is_deleted,
    modified_timestamp,
    labels_combined_id
FROM
    {{ source(
        'silver_crosschain',
        'labels_combined'
    ) }}
WHERE
    blockchain = 'optimism'
    AND address LIKE '0x%'
@@ -1,80 +0,0 @@
{{ config (
    materialized = "view",
    tags = ['overflowed_traces']
) }}

{% for item in range(
    1,
    11
) %}

SELECT
    o.file_name,
    f.block_number,
    f.index_vals,
    f.path,
    f.key,
    f.value_
FROM
    (
        SELECT
            file_name,
            file_url,
            index_cols,
            [overflowed_block, overflowed_tx] AS index_vals
        FROM
            (
                SELECT
                    block_number,
                    POSITION,
                    file_name,
                    file_url,
                    index_cols,
                    VALUE [0] AS overflowed_block,
                    VALUE [1] AS overflowed_tx,
                    block_number = overflowed_block
                    AND POSITION = overflowed_tx AS missing
                FROM
                    (
                        SELECT
                            block_number,
                            POSITION,
                            file_name,
                            file_url,
                            index_cols,
                            utils.udf_detect_overflowed_responses(
                                file_url,
                                index_cols
                            ) AS index_vals
                        FROM
                            {{ ref("bronze__potential_overflowed_traces") }}
                        WHERE
                            row_no = {{ item }}
                    ),
                    LATERAL FLATTEN (
                        input => index_vals
                    )
            )
        WHERE
            missing = TRUE
    ) o,
    TABLE(
        utils.udtf_flatten_overflowed_responses(
            o.file_url,
            o.index_cols,
            [o.index_vals]
        )
    ) f
WHERE
    NOT IS_OBJECT(
        f.value_
    )
    AND NOT IS_ARRAY(
        f.value_
    )
    AND NOT IS_NULL_VALUE(
        f.value_
    ) {% if not loop.last %}
    UNION ALL
{% endif %}
{% endfor %}
@@ -1,80 +0,0 @@
{{ config (
    materialized = "view",
    tags = ['overflowed_traces']
) }}

WITH impacted_blocks AS (

    SELECT
        blocks_impacted_array
    FROM
        {{ ref("silver_observability__traces_completeness") }}
    ORDER BY
        test_timestamp DESC
    LIMIT
        1
), all_missing AS (
    SELECT
        DISTINCT VALUE :: INT AS block_number
    FROM
        impacted_blocks,
        LATERAL FLATTEN (
            input => blocks_impacted_array
        )
),
all_txs AS (
    SELECT
        block_number,
        POSITION AS tx_position,
        tx_hash
    FROM
        {{ ref("silver__transactions") }}
        JOIN all_missing USING (block_number)
),
missing_txs AS (
    SELECT
        DISTINCT txs.block_number,
        txs.tx_position,
        file_name
    FROM
        all_txs txs
        LEFT JOIN {{ source(
            "optimism_gold",
            "fact_traces"
        ) }}
        tr2 USING (
            block_number,
            tx_position
        )
        JOIN {{ ref("streamline__traces_complete") }} USING (block_number)
        LEFT JOIN {{ source(
            'optimism_silver',
            'overflowed_traces'
        ) }}
        ot USING (
            block_number,
            tx_position
        )
    WHERE
        tr2.block_number IS NULL
        AND ot.block_number IS NULL
)
SELECT
    block_number,
    tx_position AS POSITION,
    file_name,
    build_scoped_file_url(
        @streamline.bronze.OPTIMISM_SERVERLESS_PROD,
        file_name
    ) AS file_url,
    ['block_number', 'array_index'] AS index_cols,
    ROW_NUMBER() over (
        ORDER BY
            block_number ASC,
            POSITION ASC
    ) AS row_no
FROM
    missing_txs
ORDER BY
    block_number ASC,
    POSITION ASC
@@ -1,26 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    asset_id,
    symbol,
    NAME,
    decimals,
    blockchain,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_native_asset_metadata_id,
    _invocation_id
FROM
    {{ source(
        'silver_crosschain',
        'complete_native_asset_metadata'
    ) }}
WHERE
    blockchain = 'ethereum'
    AND symbol = 'ETH'
@@ -1,29 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    HOUR,
    asset_id,
    symbol,
    NAME,
    decimals,
    price,
    blockchain,
    is_imputed,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_native_prices_id,
    _invocation_id
FROM
    {{ source(
        'silver_crosschain',
        'complete_native_prices'
    ) }}
WHERE
    blockchain = 'ethereum'
    AND symbol = 'ETH'
@@ -1,29 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    asset_id,
    token_address,
    NAME,
    symbol,
    platform,
    platform_id,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_provider_asset_metadata_id,
    _invocation_id
FROM
    {{ source(
        'silver_crosschain',
        'complete_provider_asset_metadata'
    ) }}
WHERE
    platform IN (
        'Optimism',
        'optimistic-ethereum'
    )
-- platforms specific to Optimism
@@ -1,24 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    asset_id,
    recorded_hour,
    OPEN,
    high,
    low,
    CLOSE,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_provider_prices_id,
    _invocation_id
FROM
    {{ source(
        'silver_crosschain',
        'complete_provider_prices'
    ) }}
-- prices for all ids
@@ -1,28 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    token_address,
    asset_id,
    symbol,
    NAME,
    decimals,
    blockchain,
    blockchain_name,
    blockchain_id,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_token_asset_metadata_id,
    _invocation_id
FROM
    {{ source(
        'silver_crosschain',
        'complete_token_asset_metadata'
    ) }}
WHERE
    blockchain = 'optimism'
@@ -1,31 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    HOUR,
    token_address,
    asset_id,
    symbol,
    NAME,
    decimals,
    price,
    blockchain,
    blockchain_name,
    blockchain_id,
    is_imputed,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_token_prices_id,
    _invocation_id
FROM
    {{ source(
        'silver_crosschain',
        'complete_token_prices'
    ) }}
WHERE
    blockchain = 'optimism'
@ -1,5 +0,0 @@
|
||||
{% docs op_batch_size %}
|
||||
|
||||
Total Optimism Txs included within batch.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
|
||||
{% docs op_batch_root %}
|
||||
|
||||
Root of batch, either for sumbission or state.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
|
||||
{% docs op_l1_block_no %}
|
||||
|
||||
The Ethereum block number that contained the batch.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
|
||||
{% docs op_l1_block_time %}
|
||||
|
||||
The timestamp of the Ethereum block that contained this batch.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
|
||||
{% docs op_l1_fee_scalar %}
|
||||
|
||||
This value covers the change in L1 gas price between the time the transaction is submitted and when it is published.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
|
||||
{% docs op_l1_gas_price %}
|
||||
|
||||
The gas price for L1 transactions when the transaction was processed.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
|
||||
{% docs op_l1_gas_used %}
|
||||
|
||||
The gas used on L1 to publish the transaction.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
|
||||
{% docs op_l1_state_batch %}
|
||||
|
||||
The batch index of when this block was included in the Ethereum state root. This column will be deprecated 8/7 and will be consolidated into a array column consisting of all L1 submission details.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
|
||||
{% docs op_l1_state_tx_hash %}
|
||||
|
||||
The L1 tx hash of when this block was included in the Ethereum state root. This column will be deprecated 8/7 and will be consolidated into a array column consisting of all L1 submission details.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
|
||||
{% docs op_l1_sub_batch %}
|
||||
|
||||
The batch index of when this block was submitted to L1. This column will be deprecated 8/7 and will be consolidated into a array column consisting of all L1 submission details.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
|
||||
{% docs op_l1_sub_tx_hash %}
|
||||
|
||||
The L1 tx hash of when this block was submitted to L1. This column will be deprecated 8/7 and will be consolidated into a array column consisting of all L1 submission details.
|
||||
|
||||
{% enddocs %}
|
||||
@ -1,5 +0,0 @@
{% docs op_max_block %}

The maximum Optimism block number this batch relates to.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs op_min_block %}

The minimum Optimism block number this batch relates to.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs op_prev_total_elements %}

The number of confirmed blocks prior to this batch.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_block_header_json %}

This JSON column contains the block header details.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_blockchain %}

The blockchain on which transactions are being confirmed.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_blocks_hash %}

The hash of the block header for a given block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_blocks_nonce %}

Block nonce is a value used during mining to demonstrate proof of work for a given block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_blocks_table_doc %}

This table contains block level data for the Optimism Blockchain. This table can be used to analyze trends at a block level, for example gas fees vs. total transactions over time. For more information on EVM transactions, please see [Etherscan Resources](https://etherscan.io/directory/Learning_Resources/Ethereum) or [The Ethereum Organization](https://ethereum.org/en/developers/docs/blocks/)

{% enddocs %}
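
As an illustration of the block-level analysis described above (gas usage vs. total transactions over time), a minimal sketch follows; the table name optimism.core.fact_blocks is an assumption, and the columns block_timestamp, tx_count and gas_used are assumed from the column docs in this diff:

-- daily transaction count vs. gas used, per the example analysis above
SELECT
    DATE_TRUNC('day', block_timestamp) AS block_date,
    SUM(tx_count) AS total_txs,
    SUM(gas_used) AS total_gas_used
FROM
    optimism.core.fact_blocks -- assumed gold table name
GROUP BY
    1
ORDER BY
    1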
@ -1,5 +0,0 @@
{% docs opt_difficulty %}

The effort required to mine the block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_extra_data %}

Any data included by the validator for a given block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_gas_limit %}

Total gas limit provided by all transactions in the block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_gas_used %}

Total gas used in the block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_miner %}

The miner who successfully added a given block to the blockchain.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_network %}

The network on the blockchain used by a transaction.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_parent_hash %}

The hash of the block from which a given block is generated. Also known as the parent block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_receipts_root %}

The root of the receipts trie.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_sha3_uncles %}

The mechanism by which Ethereum JavaScript RLP encodes an empty string.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_size %}

Block size, which is determined by a given block's gas limit.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_total_difficulty %}

Total difficulty of the chain at a given block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_tx_count %}

Total number of transactions within a block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs opt_uncle_blocks %}

Uncle blocks occur when two blocks are mined and broadcasted at the same time, with the same block number. The block validated across the most nodes will be added to the primary chain, and the other one becomes an uncle block. Miners do receive rewards for uncle blocks.

{% enddocs %}
@ -1,83 +0,0 @@
{% docs evm_bridge_table_doc %}

A convenience table that aggregates bridge activity from event_logs, traces and transfers, including bridge deposits and transfers sent from the following protocols: ACROSS, AXELAR, CELER, CBRIDGE, DLN, DEBRIDGE, EYWA, HOP, MESON, MULTICHAIN, STARGATE, SYMBIOSIS, SYNAPSE, WORMHOLE, along with other helpful columns, including an amount in USD where available. Note that this table only includes records for the protocols listed above with live, onchain bridge activity and may not represent the complete bridging picture.

{% enddocs %}
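
To make the column definitions that follow concrete, here is a minimal sketch of a query against this table; the table name optimism.defi.ez_bridge_activity and the block_timestamp column are assumptions, while platform, destination_chain and amount_usd come from the docs blocks below:

-- 30-day bridge volume by platform and destination chain
SELECT
    platform,
    destination_chain,
    SUM(amount_usd) AS volume_usd
FROM
    optimism.defi.ez_bridge_activity -- assumed gold table name
WHERE
    block_timestamp >= DATEADD('day', -30, CURRENT_DATE) -- block_timestamp assumed
GROUP BY
    1, 2
ORDER BY
    volume_usd DESC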
{% docs evm_bridge_platform %}

The platform or protocol from which the bridge transaction or event originates.

{% enddocs %}

{% docs evm_bridge_origin_from %}

The address the transaction originated from. This may be an EOA or contract address; however, in most cases this is the user that initiated the bridge deposit or transfer.

{% enddocs %}

{% docs evm_bridge_sender %}

The address that initiated the bridge deposit or transfer. This address is the sender of the tokens/assets being bridged to the destination chain. This may be an EOA or contract address.

{% enddocs %}

{% docs evm_bridge_receiver %}

The designated address set to receive the deposit or transfer. This may be an EOA or contract address.

{% enddocs %}

{% docs evm_bridge_destination_chain_receiver %}

The designated address set to receive the bridged tokens on the target chain after the completion of the bridge transaction. For non-EVM chains, the hex address is decoded/encoded to match the data format of the destination chain, where possible. This may be an EOA or contract address.

{% enddocs %}

{% docs evm_bridge_destination_chain %}

The name of the blockchain network to which the assets are being bridged. It could be any EVM-compatible chain or other blockchain network that the bridging protocol supports.

{% enddocs %}

{% docs evm_bridge_destination_chain_id %}

The numeric identifier associated with the destination blockchain network. This is specific to the chain and helps in uniquely identifying it.

{% enddocs %}

{% docs evm_bridge_address %}

The address of the contract responsible for handling the bridge deposit or transfer. This contract mediates the transfer and ensures that assets are sent and received appropriately.

{% enddocs %}

{% docs evm_bridge_token_address %}

The address associated with the token that is being bridged. It provides a unique identifier for the token within its origin blockchain.

{% enddocs %}

{% docs evm_bridge_token_symbol %}

The symbol representing the token being bridged. This provides a shorthand representation of the token.

{% enddocs %}

{% docs evm_bridge_amount_unadj %}

The raw, non-decimal adjusted amount of tokens involved in the bridge transaction.

{% enddocs %}

{% docs evm_bridge_amount %}

The decimal adjusted amount of tokens involved in the bridge transaction, where available.

{% enddocs %}

{% docs evm_bridge_amount_usd %}

The value of the bridged tokens in USD at the time of the bridge transaction, where available.

{% enddocs %}