Mirror of https://github.com/FlipsideCrypto/ethereum-models.git (synced 2026-02-06 11:27:00 +00:00)
AN-5989/ethereum-migration (#1055)
* repo clean up
* silver changes
* silver streamline
* bronze sl
* macros
* wfs and decoded traces macro
* gold tags
* bronze tags
* beacon tags
* silver tags
* silver nft tags
* defi tags
* silver protocol tags
* workflow crons
* remove seed
* source
* refs
* docs
* docs
* sources
* remove complete function abis
* bronze issues
* curated updates
* column names
* column changes
* column changes
* tag
This commit is contained in:
  parent 40c2d2ddbc
  commit a8d19a517c
.github/workflows/dbt_alter_all_gha_tasks.yml (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
name: dbt_alter_all_gha_tasks
run-name: dbt_alter_all_gha_tasks

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      task_action:
        type: choice
        description: Action to perform on all tasks
        required: true
        options:
          - RESUME
          - SUSPEND
        default: RESUME

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_all_gha_tasks.yml@pre-release/v4-beta
    with:
      task_action: ${{ inputs.task_action }}
      target: prod
    secrets: inherit
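The `uses:` line above calls a reusable workflow in the fsc-evm repo, pinned to the `pre-release/v4-beta` ref, forwarding the dispatch input plus all secrets. For that call to resolve, the callee has to declare matching `workflow_call` inputs. The actual fsc-evm template is not part of this diff; a minimal sketch of what its trigger side might look like (shape assumed):

    # hypothetical callee sketch, not the actual fsc-evm template
    on:
      workflow_call:
        inputs:
          task_action:
            type: string   # workflow_call inputs are string/boolean/number; 'choice' exists only on workflow_dispatch
            required: true
          target:
            type: string
            required: false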
.github/workflows/dbt_alter_gha_task.yml (vendored, deleted, 53 lines)
@@ -1,53 +0,0 @@
name: dbt_alter_gha_task
run-name: dbt_alter_gha_task

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      workflow_name:
        type: string
        description: Name of the workflow to perform the action on, no .yml extension
        required: true
      task_action:
        type: choice
        description: Action to perform
        required: true
        options:
          - SUSPEND
          - RESUME
        default: SUSPEND

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
    with:
      workflow_name: |
        ${{ inputs.workflow_name }}
      task_action: |
        ${{ inputs.task_action }}
      environment: workflow_prod
    secrets: inherit

  notify-failure:
    needs: [called_workflow_template]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
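Worth noting what disappears with this file: the old workflows exported Snowflake connection details (ACCOUNT, ROLE, USER, PASSWORD, and so on) as environment variables for a locally checked-in dbt profile to read, typically via `env_var()`. A sketch of the kind of profile those variables fed (profile and target names assumed, not from this repo):

    # profiles.yml — illustrative sketch only
    ethereum:
      target: prod
      outputs:
        prod:
          type: snowflake
          account: "{{ env_var('ACCOUNT') }}"
          user: "{{ env_var('USER') }}"
          password: "{{ env_var('PASSWORD') }}"
          role: "{{ env_var('ROLE') }}"
          database: "{{ env_var('DATABASE') }}"
          warehouse: "{{ env_var('WAREHOUSE') }}"
          schema: "{{ env_var('SCHEMA') }}"

The new callers drop the env block entirely and pass `secrets: inherit`, leaving credential wiring to the shared template.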
.github/workflows/dbt_alter_gha_tasks.yml (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
name: dbt_alter_gha_tasks
run-name: dbt_alter_gha_tasks

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      workflow_name:
        type: string
        description: Name of the workflow to perform the action on, no .yml extension
        required: true
      task_action:
        type: choice
        description: Action to perform
        required: true
        options:
          - SUSPEND
          - RESUME
        default: SUSPEND

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_gha_tasks.yml@pre-release/v4-beta
    with:
      workflow_name: ${{ inputs.workflow_name }}
      task_action: ${{ inputs.task_action }}
      target: prod
    secrets: inherit
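A note on the `concurrency` block that recurs in every workflow in this commit: grouping by `${{ github.workflow }}` means at most one run of that workflow executes at a time, and a newly triggered run waits (replacing any run already queued). If cancelling the in-flight run were preferred instead, the optional flag below would do it — shown purely as an illustration, not part of this commit:

    # illustrative sketch, not part of the commit
    concurrency:
      group: ${{ github.workflow }}
      cancel-in-progress: true   # cancel the running instance instead of queueing behind it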
.github/workflows/dbt_deploy_new_workflows.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: dbt_deploy_new_workflows
run-name: dbt_deploy_new_workflows

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Deploy New Github Actions
      command: |
        make deploy_new_gha_tasks DBT_TARGET=prod
    secrets: inherit
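The `command_name`/`command` inputs used throughout these callers are free-form strings that the shared template presumably executes in a shell step. Since the template itself is not in this diff, here is only a minimal sketch of how such a callee could consume them (all names assumed):

    # hypothetical callee sketch, not the actual fsc-evm template
    on:
      workflow_call:
        inputs:
          command_name:
            type: string
            required: true
          command:
            type: string
            required: true

    jobs:
      run:
        runs-on: ubuntu-latest
        steps:
          - uses: actions/checkout@v4
          - name: ${{ inputs.command_name }}
            run: ${{ inputs.command }}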
.github/workflows/dbt_docs_update.yml (vendored; 71 lines changed)
@@ -5,75 +5,10 @@ on:
    branches:
      - "main"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: refresh ddl for datashare
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
      - name: checkout docs branch
        run: |
          git checkout -B docs origin/main
      - name: generate dbt docs
        run: dbt docs generate -t prod

      - name: move files to docs directory
        run: |
          mkdir -p ./docs
          cp target/{catalog.json,manifest.json,index.html} docs/
      - name: clean up target directory
        run: dbt clean

      - name: check for changes
        run: git status

      - name: stage changed files
        run: git add .

      - name: commit changed files
        run: |
          git config user.email "abc@xyz"
          git config user.name "github-actions"
          git commit -am "Auto-update docs"
      - name: push changes to docs
        run: |
          git push -f --set-upstream origin docs

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_docs_update.yml@pre-release/v4-beta
    secrets: inherit
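One small detail in the removed job: `dbt docs generate` writes its static site to `target/`, and only three of those files are needed to serve it. The copy step relies on bash brace expansion, so the single `cp` is equivalent to:

    # the same "move files" step with the brace expansion written out
    - name: move files to docs directory
      run: |
        mkdir -p ./docs
        cp target/catalog.json docs/
        cp target/manifest.json docs/
        cp target/index.html docs/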
.github/workflows/dbt_integration_test.yml (vendored; 33 lines changed)
@@ -3,39 +3,20 @@ run-name: ${{ github.event.inputs.branch }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        required: true
        type: string
        required: true

concurrency: ${{ github.workflow }}

jobs:
  prepare_vars:
    runs-on: ubuntu-latest
    environment:
      name: ${{ inputs.environment }}
    outputs:
      warehouse: ${{ steps.set_outputs.outputs.warehouse }}
    steps:
      - name: Set warehouse output
        id: set_outputs
        run: |
          echo "warehouse=${{ vars.WAREHOUSE }}" >> $GITHUB_OUTPUT

  called_workflow_template:
    needs: prepare_vars
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_integration_test.yml@pre-release/v4-beta
    with:
      command: >
      target: ${{ inputs.environment }}
      command: |
        dbt test --selector 'integration_tests'
      environment: ${{ inputs.environment }}
      warehouse: ${{ needs.prepare_vars.outputs.warehouse }}
    secrets: inherit

  notify-failure:
    needs: [called_workflow_template]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
    secrets: inherit
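The integration test command references a named dbt selector; selectors are defined in a `selectors.yml` at the project root. The repo's actual definition is not part of this diff — a minimal sketch of what it might look like (the tag value is assumed):

    # selectors.yml — illustrative sketch only
    selectors:
      - name: integration_tests
        definition:
          method: tag
          value: integration_tests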
.github/workflows/dbt_run_abi_refresh.yml (vendored, deleted, 56 lines)
@@ -1,56 +0,0 @@
name: dbt_run_abi_refresh
run-name: dbt_run_abi_refresh

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Update ABI models
        run: |
          dbt run -m "ethereum_models,tag:abis"

      - name: Kick off decoded history, if there are new ABIs from users
        run: |
          dbt run-operation run_decoded_history

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_adhoc.yml (vendored; 62 lines changed)
@@ -1,13 +1,13 @@
name: dbt_run_adhoc
run-name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        type: choice
      target:
        type: choice
        description: DBT Run Environment
        required: true
        options:
@@ -15,61 +15,29 @@ on:
          - prod
        default: dev
      warehouse:
        type: choice
        type: choice
        description: Snowflake warehouse
        required: true
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
          - DBT_MEGA
          - BACKFILL
        default: DBT
      dbt_command:
        type: string
        description: "DBT Run Command"
        description: 'DBT Run Command'
        required: true

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      warehouse: ${{ inputs.warehouse }}
      target: ${{ inputs.target }}
      command_name: Run DBT Command
      command: ${{ inputs.dbt_command }}
    secrets: inherit
.github/workflows/dbt_run_dev_refresh.yml (vendored; 78 lines changed)
@@ -3,81 +3,13 @@ run-name: dbt_run_dev_refresh

on:
  workflow_dispatch:
  schedule:
    # Runs "Monday at 4:00 UTC" (see https://crontab.guru)
    - cron: "0 4 * * 1"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs_refresh:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run-operation run_sp_create_prod_clone

  notify-failure:
    needs: [run_dbt_jobs_refresh]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

  run_dbt_jobs_udfs:
    runs-on: ubuntu-latest
    needs: run_dbt_jobs_refresh
    environment:
      name: workflow_dev

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Recreate UDFs
        run: |
          dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
          dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev

  notify-failure2:
    needs: [run_dbt_jobs_udfs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_dev_refresh.yml@pre-release/v4-beta
    secrets: inherit
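For readers unfamiliar with the cron strings scattered through these schedules (all of which this commit removes in favor of external task scheduling): the five fields are minute, hour, day-of-month, month, and day-of-week, evaluated in UTC by GitHub Actions. The removed trigger above, annotated:

    # illustrative annotation only
    on:
      schedule:
        - cron: "0 4 * * 1"   # minute 0, hour 4, any day-of-month, any month, Monday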
.github/workflows/dbt_run_full_observability.yml (vendored; 53 lines changed)
@@ -3,50 +3,21 @@ run-name: dbt_run_full_observability

on:
  workflow_dispatch:
  schedule:
    # Runs "At 00:00 on day-of-month 1." (see https://crontab.guru)
    - cron: "0 0 1 * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod_2xl
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      warehouse: DBT_EMERGENCY
      command_name: Run Observability Models
      command: |
        dbt run --threads 2 --vars '{"MAIN_OBSERV_FULL_TEST_ENABLED":True}' -m "fsc_evm,tag:observability"
    secrets: inherit

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "ethereum_models,tag:observability"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_heal_models.yml (vendored; 53 lines changed)
@@ -3,50 +3,17 @@ run-name: dbt_run_heal_models

on:
  workflow_dispatch:
  schedule:
    # Runs at 04:55 on Wednesday (see https://crontab.guru)
    - cron: "55 4 * * 3"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "ethereum_models,tag:heal" --vars '{"HEAL_MODEL":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Heal Models
      command: |
        dbt run -m "$PROJECT_NAME,tag:heal" --vars '{"HEAL_MODEL":True}'
    secrets: inherit
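`--vars` passes a YAML/JSON dictionary that overrides project-level variables for that one invocation; the heal-tagged models presumably branch on `HEAL_MODEL` internally. The project-side default that such a flag overrides would look roughly like this (the default value is assumed):

    # dbt_project.yml — illustrative sketch of the overridable default
    vars:
      HEAL_MODEL: false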
.github/workflows/dbt_run_nft_list.yml (vendored; 51 lines changed)
@@ -3,50 +3,17 @@ run-name: dbt_run_nft_list

on:
  workflow_dispatch:
  schedule:
    # At minute 0 past hour 0 and 12. (see https://crontab.guru)
    - cron: "0 0,12 * * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "ethereum_models,tag:nft_list"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Curated Models
      command: |
        dbt run -m "ethereum_models,tag:nft_list"
    secrets: inherit
.github/workflows/dbt_run_nft_reads.yml (vendored; 51 lines changed)
@@ -3,50 +3,17 @@ run-name: dbt_run_nft_reads

on:
  workflow_dispatch:
  schedule:
    # At minute 0. (see https://crontab.guru)
    - cron: "0 * * * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "ethereum_models,tag:nft_reads"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Curated Models
      command: |
        dbt run -m "ethereum_models,tag:nft_reads"
    secrets: inherit
.github/workflows/dbt_run_operation_reorg.yml (vendored, deleted, 59 lines)
@@ -1,59 +0,0 @@
name: dbt_run_operation_reorg
run-name: dbt_run_operation_reorg

on:
  workflow_dispatch:
  schedule:
    # Runs at minute 55 every Sunday (see https://crontab.guru)
    - cron: "55 0 * * 0"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: List reorg models
        id: list_models
        run: |
          reorg_model_list=$(dbt list --select "ethereum_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
          echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT

      - name: Execute block_reorg macro
        run: |
          dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '169'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
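The removed "List reorg models" step is worth unpacking: `dbt list --output name` prints one resource name per line, and the shell pipeline joins those into the comma-separated string the `block_reorg` macro expects. Simplified, with the prefix-stripping `grep`/`awk` and the log-grepping omitted:

    # simplified sketch of the removed step's core pattern
    - name: List reorg models
      id: list_models
      run: |
        reorg_model_list=$(dbt list --select "ethereum_models,tag:reorg" --resource-type model --output name | tr '\n' ',' | sed 's/,$//')
        echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT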
.github/workflows/dbt_run_scheduled_abis.yml (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
name: dbt_run_scheduled_abis
run-name: dbt_run_scheduled_abis

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run ABI Models
      command: |
        dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:gold,tag:abis"
      command_name_2: Kick off decoded logs history, if there are new user submitted ABIs
      command_2: |
        dbt run-operation fsc_evm.run_decoded_logs_history
        dbt run-operation run_decoded_traces_history
    secrets: inherit
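Note the `command_name_2`/`command_2` pair: the shared template evidently supports a second command that runs after the first. On the callee side that presumably maps to an optional follow-up step, roughly (assumed, not the actual template):

    # hypothetical callee fragment, not part of this commit
    - name: ${{ inputs.command_name_2 }}
      if: ${{ inputs.command_2 != '' }}
      run: ${{ inputs.command_2 }}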
.github/workflows/dbt_run_scheduled_beacon.yml (vendored; 46 lines changed)
@@ -6,46 +6,14 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "ethereum_models,tag:beacon"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Beacon Models
      command: |
        dbt run -m "ethereum_models,tag:beacon"
    secrets: inherit
.github/workflows/dbt_run_scheduled_curated.yml (vendored; 46 lines changed)
@@ -6,46 +6,14 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "ethereum_models,tag:curated"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Curated Models
      command: |
        dbt run -m "$PROJECT_NAME,tag:curated" "fsc_evm,tag:curated"
    secrets: inherit
.github/workflows/dbt_run_scheduled_decoder.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: dbt_run_scheduled_decoder
run-name: dbt_run_scheduled_decoder

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Decoder Models
      command: |
        dbt run -m "fsc_evm,tag:bronze,tag:decoded_logs" "fsc_evm,tag:silver,tag:decoded_logs" "fsc_evm,tag:gold,tag:decoded_logs" "ethereum_models,tag:bronze,tag:decoded_traces" "ethereum_models,tag:silver,tag:decoded_traces" "ethereum_models,tag:gold,tag:decoded_traces"
    secrets: inherit
.github/workflows/dbt_run_scheduled_main.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: dbt_run_scheduled_main
run-name: dbt_run_scheduled_main

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Main Models
      command: |
        dbt run -m "fsc_evm,tag:bronze,tag:core" "fsc_evm,tag:silver,tag:core" "fsc_evm,tag:gold,tag:core" "fsc_evm,tag:silver,tag:prices" "fsc_evm,tag:gold,tag:prices" "fsc_evm,tag:silver,tag:labels" "fsc_evm,tag:gold,tag:labels" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "ethereum_models,tag:silver,tag:balances" "ethereum_models,tag:gold,tag:balances" "ethereum_models,tag:silver,tag:reads" "ethereum_models,tag:gold,tag:reads" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" "ethereum_models,tag:streamline_balances_realtime" "ethereum_models,tag:streamline_balances_complete" "ethereum_models,tag:streamline_decoded_traces_complete" "ethereum_models,tag:streamline_decoded_traces_realtime" --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
    secrets: inherit
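The long selector above leans on two dbt selection rules: comma-joined criteria within a single argument intersect (so `fsc_evm,tag:bronze,tag:core` means "in package fsc_evm AND tagged bronze AND tagged core"), while space-separated arguments union. The tags themselves would be attached to models via config, for example at the folder level (path names assumed):

    # dbt_project.yml — illustrative sketch of folder-level tagging
    models:
      ethereum_models:
        core:
          +tags: ['silver', 'core']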
@@ -1,51 +0,0 @@ (deleted file)
name: dbt_run_scheduled_non_realtime
run-name: dbt_run_scheduled_non_realtime

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:non_realtime" "ethereum_models,tag:realtime" "ethereum_models,tag:streamline_decoded_logs_realtime" "ethereum_models,tag:streamline_decoded_logs_complete" "ethereum_models,tag:streamline_balances_realtime" "ethereum_models,tag:streamline_balances_complete" "ethereum_models,tag:streamline_decoded_traces_complete" "ethereum_models,tag:streamline_decoded_traces_realtime"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_scheduled_scores.yml (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
name: dbt_run_scheduled_scores
run-name: dbt_run_scheduled_scores

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Scores Models
      command: |
        dbt run -m "fsc_evm,tag:scores"
    secrets: inherit
.github/workflows/dbt_run_streamline_abis.yml (vendored, deleted, 52 lines)
@@ -1,52 +0,0 @@
name: dbt_run_streamline_abis
run-name: dbt_run_streamline_abis

on:
  workflow_dispatch:
  schedule:
    # Runs "At minute 20 past every 2nd hour from 1 through 23." (see https://crontab.guru)
    - cron: "20 1-23/2 * * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_abis_realtime" "ethereum_models,tag:streamline_abis_complete"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_streamline_beacon.yml (vendored; 49 lines changed)
@@ -6,46 +6,11 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_beacon_realtime" "ethereum_models,tag:streamline_beacon_complete"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Beacon Models
      command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_beacon_realtime" "ethereum_models,tag:streamline_beacon_complete"
    secrets: inherit
@@ -6,50 +6,15 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_core_complete" "ethereum_models,tag:streamline_core_realtime" "ethereum_models,tag:streamline_core_complete_receipts" "ethereum_models,tag:streamline_core_realtime_receipts" "ethereum_models,tag:streamline_core_complete_confirm_blocks" "ethereum_models,tag:streamline_core_realtime_confirm_blocks"

      - name: Run Chainhead Tests
        run: |
          dbt test -m "ethereum_models,tag:chainhead"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Chainhead Models
      command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:chainhead" "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash"
        dbt test -m "fsc_evm,tag:chainhead"
    secrets: inherit
@@ -1,56 +0,0 @@ (deleted file)
name: dbt_run_streamline_decoded_logs_history
run-name: dbt_run_streamline_decoded_logs_history

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Update complete table
        run: |
          dbt run -m "ethereum_models,tag:streamline_decoded_logs_complete"

      - name: Decode historical logs
        run: |
          dbt run-operation decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -1,54 +0,0 @@ (deleted file)
name: dbt_run_streamline_decoded_traces_history
run-name: dbt_run_streamline_decoded_traces_history

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Update complete table
        run: |
          dbt run -m "fsc_evm,tag:streamline_decoded_traces_complete"
      - name: Decode historical traces
        run: |
          dbt run-operation decoded_traces_history --args '{"backfill_mode": false}' --vars '{"STREAMLINE_INVOKE_STREAMS":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_streamline_decoder.yml (vendored, deleted, 51 lines)
@@ -1,51 +0,0 @@
name: dbt_run_streamline_decoder
run-name: dbt_run_streamline_decoder

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "ethereum_models,tag:decoded_logs" "ethereum_models,tag:decoded_traces"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_streamline_decoder_history.yml (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
name: dbt_run_streamline_decoder_history
run-name: dbt_run_streamline_decoder_history

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Decoder Complete
      command: |
        dbt run -m "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" "ethereum_models,tag:streamline,tag:decoded_traces"
      command_name_2: Run Streamline Decoder History
      command_2: |
        dbt run-operation fsc_evm.decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
        dbt run-operation decoded_traces_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
    secrets: inherit
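`dbt run-operation fsc_evm.decoded_logs_history` calls a macro namespaced to the fsc_evm package, which therefore has to be installed as a dbt package (distinct from the reusable GitHub workflows pinned above). The repo's actual packages.yml is not in this diff; such an entry would look roughly like this, with the revision assumed:

    # packages.yml — illustrative sketch; revision is an assumption
    packages:
      - git: https://github.com/FlipsideCrypto/fsc-evm.git
        revision: pre-release/v4-beta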
.github/workflows/dbt_run_streamline_history.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run History Models
      command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:history" --exclude "fsc_evm,tag:receipts_by_hash"
    secrets: inherit
@@ -1,79 +0,0 @@ (deleted file)
name: dbt_run_streamline_history_adhoc
run-name: dbt_run_streamline_history_adhoc

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        type: choice
        description: DBT Run Environment
        required: true
        options:
          - dev
          - prod
          - prod_backfill
        default: dev
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
        default: DBT
      dbt_command:
        type: choice
        description: DBT Run Command
        required: true
        options:
          - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_core_complete" "ethereum_models,tag:streamline_core_history" "ethereum_models,tag:streamline_core_complete_receipts" "ethereum_models,tag:streamline_core_history_receipts" "ethereum_models,tag:streamline_core_complete_confirm_blocks" "ethereum_models,tag:streamline_core_history_confirm_blocks"
          - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_beacon_history" "ethereum_models,tag:streamline_beacon_complete"
          - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_balances_history" "ethereum_models,tag:streamline_balances_complete"
          - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_abis_history" "ethereum_models,tag:streamline_abis_complete"
          - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_reads_history" "ethereum_models,tag:streamline_reads_complete tag:streamline_reads_curated"
env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10.x"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_streamline_reads.yml (vendored; 54 lines changed)
@@ -3,50 +3,14 @@ run-name: dbt_run_streamline_reads

on:
  workflow_dispatch:
  schedule:
    # Runs "At minute 40 past every 2nd hour from 1 through 23." (see https://crontab.guru)
    - cron: "40 1-23/2 * * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}
    branches:
      - "main"

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_reads_curated" "ethereum_models,tag:streamline_reads_realtime" "ethereum_models,tag:streamline_reads_complete"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run History Models
      command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "ethereum_models,tag:streamline_reads_curated" "ethereum_models,tag:streamline_reads_realtime" "ethereum_models,tag:streamline_reads_complete"
    secrets: inherit
.github/workflows/dbt_temp_balances_catchup.yml (vendored, deleted, 52 lines)
@@ -1,52 +0,0 @@
name: dbt_temp_balances_catchup
run-name: dbt_temp_balances_catchup

on:
  workflow_dispatch:
  schedule:
    # Runs at minute 21 every hour
    - cron: "21 * * * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m models/streamline/silver/balances/history/streamline__token_balances_history.sql models/streamline/silver/balances/complete/streamline__complete_token_balances.sql

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_test_beacon.yml (vendored; 51 lines changed)
@@ -3,50 +3,17 @@ run-name: dbt_test_beacon

on:
  workflow_dispatch:
  schedule:
    # Runs "at 9:05 UTC" (see https://crontab.guru)
    - cron: "5 9 * * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt test -m "ethereum_models,tag:beacon"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      target: test
      command_name: Run Beacon Tests
      command: dbt test -m "ethereum_models,tag:beacon"
    secrets: inherit
.github/workflows/dbt_test_daily.yml (vendored; 59 lines changed)
@@ -1,52 +1,23 @@
name: dbt_test_daily
run-name: dbt_test_daily

on:
  workflow_dispatch:
  schedule:
    # Runs "at 9:00 UTC" (see https://crontab.guru)
    - cron: "0 9 * * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt test --exclude "ethereum_models,tag:full_test" "ethereum_models,tag:recent_test" "ethereum_models,tag:beacon" "ethereum_models,tag:gha_tasks" livequery_models

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      target: test
      command_name: Build Daily Testing Views
      command: |
        dbt run -m "fsc_evm,tag:daily_test"
      command_name_2: Run Daily Tests (all tests excluding full, recent and misc. others)
      command_2: |
        dbt test --exclude "fsc_evm,tag:full_test" "fsc_evm,tag:recent_test" "fsc_evm,tag:gha_tasks" livequery_models
    secrets: inherit
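The exclude list works because dbt tests carry tags just like models do; a test tagged `full_test`, for instance, is skipped by this daily run and picked up by the monthly one instead. An illustrative declaration (model and column names assumed):

    # schema.yml — illustrative sketch of a tagged test
    models:
      - name: some_gold_model
        columns:
          - name: some_key
            tests:
              - unique:
                  config:
                    tags: ['full_test']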
.github/workflows/dbt_test_intraday.yml (vendored; 51 lines changed)
@@ -5,48 +5,17 @@ on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "ethereum_models,tag:observability"
          dbt test -m "ethereum_models,tag:recent_test"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      target: test
      command_name: Run Observability & Recent Tests
      command: |
        dbt run -m "fsc_evm,tag:observability"
        dbt test -m "fsc_evm,tag:recent_test"
    secrets: inherit
56
.github/workflows/dbt_test_monthly.yml
vendored
@ -3,49 +3,21 @@ run-name: dbt_test_monthly
on:
workflow_dispatch:
schedule:
# Runs "28th of month at 12AM" (see https://crontab.guru)
- cron: "0 0 28 * *"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
with:
python-version: "3.7.x"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
dbt test --select "ethereum_models,tag:full_test"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
warehouse: DBT_EMERGENCY
command_name: Build Full Testing Views
command: |
dbt run -m "fsc_evm,tag:full_test"
command_name_2: Run Full Tests
command_2: |
dbt test -m "fsc_evm,tag:full_test"
secrets: inherit
27
.github/workflows/slack_notify.yml
vendored
@ -1,27 +0,0 @@
name: Slack Notification
on:
workflow_call:
secrets:
SLACK_WEBHOOK_URL:
required: true
jobs:
notify:
runs-on: ubuntu-latest
environment: workflow_prod
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies
run: pip install requests
- name: Send Slack notification
run: python python/slack_alert.py
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
2
.gitignore
vendored
@ -17,4 +17,4 @@ logs/
.env
.user.yml
dbt-env/
package-lock.yml
/package-lock.yml
138
Makefile
@ -1,49 +1,109 @@
SHELL := /bin/bash
DBT_TARGET ?= dev
RECEIPTS_BY_HASH_ENABLED ?= false
dbt-console:
docker-compose run dbt_console
cleanup_time:
@set -e; \
rm -f package-lock.yml && dbt clean && dbt deps
refresh_package:
rm -f package-lock.yml
dbt clean
dbt deps
dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{UPDATE_UDFS_AND_SPS: true}' --target dev-admin
deploy_gha_workflows_table:
@set -e; \
echo "Collecting workflow names..." ; \
WORKFLOW_VALUES="" ; \
for file in $$(find .github/workflows -name "*.yml" -type f); do \
filename=$$(basename "$$file" .yml) ; \
if [ -z "$$WORKFLOW_VALUES" ]; then \
WORKFLOW_VALUES="('$$filename')" ; \
else \
WORKFLOW_VALUES="$$WORKFLOW_VALUES,('$$filename')" ; \
fi ; \
done ; \
echo "Found workflows: $$WORKFLOW_VALUES" ; \
dbt run-operation create_workflow_table --args "{\"workflow_values\": \"$$WORKFLOW_VALUES\"}" -t $(DBT_TARGET)
realtime:
dbt run -m models/streamline/silver/decoder/realtime/streamline__decode_traces_realtime.sql --vars '{"STREAMLINE_INVOKE_STREAMS":True,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True"}' --target dev-admin
dbt run -m models/streamline/bronze/decoder/bronze__streamline_decoded_traces.sql --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True}' --target dev-admin
dbt run -m models/silver/core/silver__decoded_traces.sql
deploy_gha_tasks:
@set -e; \
make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)
realtime_logs:
dbt run -m models/streamline/silver/decoder/realtime/streamline__decode_logs_realtime.sql --vars '{"STREAMLINE_INVOKE_STREAMS":True,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True"}' --target dev-admin
dbt run -m models/streamline/bronze/decoder/bronze__streamline_decoded_logs.sql --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True}' --target dev-admin
dbt run -m models/silver/core/silver__decoded_logs.sql
deploy_new_gha_tasks:
@set -e; \
make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)
history:
dbt run -m models/streamline/silver/decoder/history/traces/range_1/streamline__decode_traces_history_011667449_011706397.sql --vars '{"STREAMLINE_INVOKE_STREAMS":True,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True"}' --target dev-admin
dbt run -m models/streamline/bronze/decoder/bronze__streamline_decoded_traces.sql --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True}' --target dev-admin
dbt run -m models/silver/core/silver__decoded_traces.sql
deploy_livequery:
@set -e; \
dbt run-operation fsc_evm.drop_livequery_schemas --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
history_logs:
dbt run -m models/streamline/silver/decoder/history/event_logs/range_0/streamline__decode_logs_history_016532020_016560020.sql --vars '{"STREAMLINE_INVOKE_STREAMS":True,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True"}' --target dev-admin
dbt run -m models/streamline/bronze/decoder/bronze__streamline_decoded_logs.sql --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True}' --target dev-admin
dbt run -m models/silver/core/silver__decoded_logs.sql
deploy_chain_phase_1:
@set -e; \
dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.call_sample_rpc_node -t $(DBT_TARGET); \
if [ "$(DBT_TARGET)" != "prod" ]; then \
if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
fi; \
else \
if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
fi; \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_2"
load_new:
dbt run -m models/silver/core/silver__blocks.sql
dbt run -m models/silver/core/silver__transactions.sql
dbt run -m models/silver/core/silver__receipts.sql
dbt run -m models/silver/core/silver__logs.sql
dbt run -m models/silver/core/silver__traces.sql
deploy_chain_phase_2:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_3"
load_abi:
dbt run -m models/silver/core/silver__relevant_contracts.sql
dbt run -m models/silver/core/silver__created_contracts.sql
dbt run -m models/silver/abis --exclude models/silver/abis/event_logs
deploy_chain_phase_3:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_2" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_2" -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_4"
load_new_and_abi:
make load_new
make load_abi
.PHONY: dbt-console refresh_package
deploy_chain_phase_4:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_3" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_3" -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
make deploy_gha_tasks DBT_TARGET=$(DBT_TARGET); \
fi; \
.PHONY: cleanup_time deploy_gha_workflows_table deploy_gha_tasks deploy_new_gha_tasks deploy_livequery deploy_chain_phase_1 deploy_chain_phase_2 deploy_chain_phase_3 deploy_chain_phase_4
@ -1,11 +0,0 @@
workflow_name,workflow_schedule
dbt_run_scheduled_non_realtime,"8,38 * * * *"
dbt_run_streamline_chainhead,"0,20,40 * * * *"
dbt_run_streamline_decoder,"14,44 * * * *"
dbt_run_scheduled_curated,"30 */4 * * *"
dbt_run_scheduled_beacon,"10 */2 * * *"
dbt_run_streamline_beacon,"55 */1 * * *"
dbt_test_intraday,"55 */4 * * *"
dbt_run_streamline_decoded_logs_history,"42 9 * * 6"
dbt_run_streamline_decoded_traces_history,"5 22 * * 6"
dbt_run_abi_refresh,"29 23 * * *"
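These seed rows mapped each GitHub Actions workflow to a cron schedule. A minimal, hypothetical sketch of how one row could drive a Snowflake task that dispatches its workflow; the task name, warehouse, and argument order of github_actions.workflow_dispatches (shown elsewhere in this diff) are assumptions for illustration:
-- Hypothetical sketch: a Snowflake task firing one workflow on its seeded cron schedule.
CREATE OR REPLACE TASK github_actions.dbt_run_scheduled_non_realtime
WAREHOUSE = DBT -- assumed warehouse name
SCHEDULE = 'USING CRON 8,38 * * * * UTC' -- "8,38 * * * *" from the seed row
AS
SELECT github_actions.workflow_dispatches(
'FlipsideCrypto', -- org
'ethereum-models', -- repo
'dbt_run_scheduled_non_realtime.yml', -- workflow file
NULL -- no extra inputs
);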
92
data/observability__exclusion_list.csv
Normal file
@ -0,0 +1,92 @@
block_number
3804951
3804282
3805213
3805274
3805144
3804909
3804375
3805171
3805210
3804357
3804917
3805111
3805195
3804880
3805005
3804985
3805056
3805047
3805177
3805129
3804887
3804982
3804898
3804931
3804939
3805099
3805119
3805027
3804885
3805174
3804945
3805147
3804005
3805207
3804216
3805224
3805042
3805126
3805141
3805091
3804347
3805247
3805132
3805262
3804934
3804958
3804962
3804299
3804906
3804953
3805228
3805016
3805222
3805064
3805072
3804222
3805115
3805279
3804874
3804913
3804992
3805084
3805258
3805157
3804893
3805061
3805019
3804973
3804920
3805230
3804901
3805266
3805038
3804969
3805190
3805200
3805153
3805067
3804865
3804988
3804975
3804926
3805010
3804315
3804309
3805272
3805249
3804227
3805202
3805033
3805022
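The new seed lists block heights to skip during observability checks. A hedged example of how such an exclusion list is typically applied; every name here other than block_number is illustrative:
-- Illustrative only: drop excluded heights from an observability scan.
SELECT b.block_number
FROM ethereum.silver.blocks b -- assumed source table
WHERE b.block_number NOT IN (
SELECT block_number
FROM ethereum.silver.observability__exclusion_list -- seeded from data/observability__exclusion_list.csv
);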
138
dbt_project.yml
@ -17,6 +17,8 @@ test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
docs-paths:
["dbt_packages/fsc_evm/doc_descriptions", "models/doc_descriptions", "models"]
target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
@ -24,19 +26,11 @@ clean-targets: # directories to be removed by `dbt clean`
- "dbt_modules"
- "dbt_packages"
models:
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
tests:
+store_failures: true # all tests
on-run-start:
- "{{ create_sps() }}"
- "{{ sp_create_load_nft_metadata() }}"
- "{{ create_udfs() }}"
on-run-end:
@ -56,6 +50,67 @@ query-comment:
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models
models:
ethereum_models: # replace with the name of the chain
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
livequery_models:
+materialized: ephemeral
fsc_evm:
+enabled: false # disable fsc_evm package by default
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
main_package:
+enabled: false # disable top level package by default, enabled subpackages as needed
admin:
+enabled: true
core:
+enabled: true # enable subpackages, as needed
bronze:
+enabled: false
token_reads:
+enabled: true
github_actions:
+enabled: true
labels:
+enabled: true
observability:
+enabled: true
prices:
+enabled: true
utils:
+enabled: true
decoder_package:
+enabled: false
abis:
+enabled: false
gold:
+enabled: true
silver:
+enabled: true
streamline:
+enabled: true
decoded_logs:
+enabled: false
gold:
+enabled: true
silver:
+enabled: true
streamline:
+enabled: true
curated_package:
+enabled: false
stats:
+enabled: true
scores_package:
+enabled: false
# In this example config, we tell dbt to build all models in the example/ directory
# as tables. These settings can be overridden in the individual model files
# using the `{{ config(...) }}` macro.
@ -68,11 +123,9 @@ vars:
STREAMLINE_RUN_HISTORY: False
UPDATE_SNOWFLAKE_TAGS: True
WAIT: 0
OBSERV_FULL_TEST: False
HEAL_MODEL: False
HEAL_MODELS: []
LOAD_CUSTOM_FUNCTIONS: False
START_GHA_TASKS: False
#### STREAMLINE 2.0 BEGIN ####
@ -98,67 +151,4 @@ vars:
- INTERNAL_DEV
- DBT_CLOUD_ETHEREUM
#### STREAMLINE 2.0 END ####
#### FSC_EVM BEGIN ####
# Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables
### GLOBAL VARIABLES BEGIN ###
## REQUIRED
GLOBAL_PROD_DB_NAME: 'ethereum'
GLOBAL_NODE_SECRET_PATH: 'Vault/prod/ethereum/quicknode/ethereum_mainnet'
GLOBAL_BLOCKS_PER_HOUR: 300
GLOBAL_USES_STREAMLINE_V1: True
### GLOBAL VARIABLES END ###
### MAIN_PACKAGE VARIABLES BEGIN ###
### CORE ###
## REQUIRED
BLOCKS_TRANSACTIONS_REALTIME_EXTERNAL_TABLE: 'blocks_v2'
BLOCKS_TRANSACTIONS_HISTORY_EXTERNAL_TABLE: 'blocks_v2'
TRACES_REALTIME_EXTERNAL_TABLE: 'traces_v2'
TRACES_HISTORY_EXTERNAL_TABLE: 'traces_v2'
RECEIPTS_REALTIME_EXTERNAL_TABLE: 'receipts_v2'
RECEIPTS_HISTORY_EXTERNAL_TABLE: 'receipts_v2'
CONFIRM_BLOCKS_REALTIME_EXTERNAL_TABLE: 'confirm_blocks_v2'
CONFIRM_BLOCKS_HISTORY_EXTERNAL_TABLE: 'confirm_blocks_v2'
## OPTIONAL
# GOLD_FULL_REFRESH: True
# SILVER_FULL_REFRESH: True
# BRONZE_FULL_REFRESH: True
# BLOCKS_COMPLETE_FULL_REFRESH: True
# CONFIRM_BLOCKS_COMPLETE_FULL_REFRESH: True
# TRACES_COMPLETE_FULL_REFRESH: True
# RECEIPTS_COMPLETE_FULL_REFRESH: True
# TRANSACTIONS_COMPLETE_FULL_REFRESH: True
# BLOCKS_TRANSACTIONS_REALTIME_TESTING_LIMIT: 3
# BLOCKS_TRANSACTIONS_HISTORY_TESTING_LIMIT: 3
# TRACES_REALTIME_TESTING_LIMIT: 3
# TRACES_HISTORY_TESTING_LIMIT: 3
# RECEIPTS_REALTIME_TESTING_LIMIT: 3
# RECEIPTS_HISTORY_TESTING_LIMIT: 3
# CONFIRM_BLOCKS_REALTIME_TESTING_LIMIT: 3
# CONFIRM_BLOCKS_HISTORY_TESTING_LIMIT: 3
# ### MAIN_PACKAGE VARIABLES END ###
# ### DECODER_PACKAGE VARIABLES BEGIN ###
# ## REQUIRED
# ## OPTIONAL
# DECODED_LOGS_COMPLETE_FULL_REFRESH: True
# DECODED_LOGS_REALTIME_TESTING_LIMIT: 3
# DECODED_LOGS_HISTORY_SQL_LIMIT: 1 #limit per monthly range
### DECODER_PACKAGE VARIABLES END ###
#### FSC_EVM END ####
#### STREAMLINE 2.0 END ####
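With +store_failures: true, dbt writes the rows behind each failing test to an audit table. A minimal sketch of inspecting one such table; the schema suffix is dbt's documented default and the database and test names are hypothetical:
-- Hypothetical: rows captured by a failing uniqueness test.
SELECT *
FROM ethereum_dev.dbt_test__audit.unique_core__fact_blocks_block_number -- assumed schema/table
LIMIT 10;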
@ -1,42 +1,9 @@
{% macro create_udfs() %}
{% if var("UPDATE_UDFS_AND_SPS") %}
{% if var("UPDATE_UDFS_AND_SPS", false) %}
{% set sql %}
CREATE schema if NOT EXISTS silver;
{{ create_udf_transform_logs(
schema = 'silver'
) }}
{{ create_udtf_get_base_table(
schema = "streamline"
) }}
{% endset %}
{% do run_query(sql) %}
{% if target.database != "ETHEREUM_COMMUNITY_DEV" %}
{% set sql %}
{{ create_udf_get_chainhead() }}
{{ create_udf_get_beacon_chainhead() }}
{{ create_udf_call_eth_node() }}
{{ create_udf_call_node() }}
{{ create_udf_call_read_batching() }}
{{ create_udf_api() }}
{{ create_udf_load_nft_metadata() }}
{{ create_udf_get_token_balances() }}
{{ create_udf_get_eth_balances() }}
{{ create_udf_get_reads() }}
{{ create_udf_get_contract_abis() }}
{{ create_udf_get_blocks() }}
{{ create_udf_get_transactions() }}
{{ create_udf_get_beacon_blocks() }}
{{ create_udf_decode_array_string() }}
{{ create_udf_decode_array_object() }}
{{ create_udf_rest_api() }}
{{ create_udf_bulk_decode_logs() }}
{{ create_udf_bulk_decode_traces() }}
{{ create_udf_json_rpc() }}
{% endset %}
{% do run_query(sql) %}
{% endif %}
{{- fsc_utils.create_udfs() -}}
{% endif %}
{% endmacro %}
@ -1,124 +0,0 @@
{% macro decoded_logs_history(backfill_mode=false) %}
{%- set params = {
"sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 8000000),
"producer_batch_size": var("DECODED_LOGS_HISTORY_PRODUCER_BATCH_SIZE", 400000),
"worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000)
} -%}
{% set wait_time = var("DECODED_LOGS_HISTORY_WAIT_TIME", 60) %}
{% set find_months_query %}
SELECT
DISTINCT date_trunc('month', block_timestamp)::date as month
FROM {{ ref('core__fact_blocks') }}
ORDER BY month ASC
{% endset %}
{% set results = run_query(find_months_query) %}
{% if execute %}
{% set months = results.columns[0].values() %}
{% for month in months %}
{% set view_name = 'decoded_logs_history_' ~ month.strftime('%Y_%m') %}
{% set create_view_query %}
create or replace view streamline.{{view_name}} as (
WITH target_blocks AS (
SELECT
block_number
FROM {{ ref('core__fact_blocks') }}
WHERE date_trunc('month', block_timestamp) = '{{month}}'::timestamp
),
new_abis AS (
SELECT
abi,
parent_contract_address,
event_signature,
start_block,
end_block
FROM {{ ref('silver__complete_event_abis') }}
{% if not backfill_mode %}
WHERE inserted_timestamp > dateadd('day', -30, sysdate())
{% endif %}
),
existing_logs_to_exclude AS (
SELECT _log_id
FROM {{ ref('streamline__decoded_logs_complete') }} l
INNER JOIN target_blocks b using (block_number)
),
candidate_logs AS (
SELECT
l.block_number,
l.tx_hash,
l.event_index,
l.contract_address,
l.topics,
l.data,
concat(l.tx_hash::string, '-', l.event_index::string) as _log_id
FROM target_blocks b
INNER JOIN {{ ref('core__fact_event_logs') }} l using (block_number)
WHERE l.tx_status = 'SUCCESS' and date_trunc('month', l.block_timestamp) = '{{month}}'::timestamp
)
SELECT
l.block_number,
l._log_id,
A.abi,
OBJECT_CONSTRUCT(
'topics', l.topics,
'data', l.data,
'address', l.contract_address
) AS data
FROM candidate_logs l
INNER JOIN new_abis A
ON A.parent_contract_address = l.contract_address
AND A.event_signature = l.topics[0]::STRING
AND l.block_number BETWEEN A.start_block AND A.end_block
WHERE NOT EXISTS (
SELECT 1
FROM existing_logs_to_exclude e
WHERE e._log_id = l._log_id
)
LIMIT {{ params.sql_limit }}
)
{% endset %}
{# Create the view #}
{% do run_query(create_view_query) %}
{{ log("Created view for month " ~ month.strftime('%Y-%m'), info=True) }}
{% if var("STREAMLINE_INVOKE_STREAMS", false) %}
{# Check if rows exist first #}
{% set check_rows_query %}
SELECT EXISTS(SELECT 1 FROM streamline.{{view_name}} LIMIT 1)
{% endset %}
{% set results = run_query(check_rows_query) %}
{% set has_rows = results.columns[0].values()[0] %}
{% if has_rows %}
{# Invoke streamline, if rows exist to decode #}
{% set decode_query %}
SELECT
streamline.udf_bulk_decode_logs_v2(
PARSE_JSON(
$${ "external_table": "decoded_logs",
"producer_batch_size": {{ params.producer_batch_size }},
"sql_limit": {{ params.sql_limit }},
"sql_source": "{{view_name}}",
"worker_batch_size": {{ params.worker_batch_size }} }$$
)
);
{% endset %}
{% do run_query(decode_query) %}
{{ log("Triggered decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{# Call wait since we actually did some decoding #}
{% do run_query("call system$wait(" ~ wait_time ~ ")") %}
{{ log("Completed wait after decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{% else %}
{{ log("No rows to decode for month " ~ month.strftime('%Y-%m'), info=True) }}
{% endif %}
{% endif %}
{% endfor %}
{% endif %}
{% endmacro %}
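For a given month, the loop above rendered a view like streamline.decoded_logs_history_2023_01 and then fired one bulk-decode call against it. The rendered invocation, using the macro's default batch sizes (the month itself is illustrative):
SELECT
streamline.udf_bulk_decode_logs_v2(
PARSE_JSON(
$${ "external_table": "decoded_logs",
"producer_batch_size": 400000,
"sql_limit": 8000000,
"sql_source": "decoded_logs_history_2023_01",
"worker_batch_size": 100000 }$$
)
);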
@ -1,4 +1,4 @@
{% macro run_decoded_history() %}
{% macro run_decoded_traces_history() %}
{% set check_for_new_user_abis_query %}
SELECT
1
@ -11,14 +11,6 @@ WHERE
{% if execute %}
{% set new_user_abis = results.columns [0].values() [0] %}
{% if new_user_abis %}
{% set invoke_logs_query %}
SELECT
github_actions.workflow_dispatches(
'FlipsideCrypto',
'{{ blockchain }}' || '-models',
'dbt_run_streamline_decoded_logs_history.yml',
NULL
) {% endset %}
{% set invoke_traces_query %}
SELECT
github_actions.workflow_dispatches(
@ -27,7 +19,6 @@ WHERE
'dbt_run_streamline_decoded_traces_history.yml',
NULL
) {% endset %}
{% do run_query(invoke_logs_query) %}
{% do run_query(invoke_traces_query) %}
{% endif %}
{% endif %}
@ -1,101 +0,0 @@
{% macro streamline_external_table_query_decoder(
source_name,
source_version
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP())
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_decoder_fr(
source_name,
source_version
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
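Both decoder queries derive their partition columns purely from the external file path. A worked example under an assumed path layout (year/month/day as the third through fifth slash-separated parts, and the block height prefixed to the sixth):
-- 'decoded_logs/v2/2024/01/15/019000000_batch_0.json' is an assumed path layout.
SELECT
CAST(SPLIT_PART(SPLIT_PART('decoded_logs/v2/2024/01/15/019000000_batch_0.json', '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number, -- 19000000
TO_DATE(concat_ws('-', '2024', '01', '15')) AS _partition_by_created_date; -- 2024-01-15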
@ -1,141 +0,0 @@
{% macro streamline_external_table_query(
source_name,
source_version,
partition_function,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.partition_key
AND DATA :error :code IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_fr(
source_name,
source_version,
partition_function,
partition_join_key,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.{{ partition_join_key }}
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.{{ partition_join_key }}
AND DATA :error :code IS NULL
AND DATA IS NOT NULL
{% endmacro %}
@ -1,36 +0,0 @@
{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
{% if model_type != '' %}
{% set model_type = '_' ~ model_type %}
{% endif %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }}
{{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }}
{% if uses_receipts_by_hash %}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{{ log("", info=True) }}
{{ log("=== Source Details ===", info=True) }}
{{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }}
{{ log("", info=True) }}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}
@ -1,29 +0,0 @@
{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %}
{%- if flags.WHICH == 'compile' and execute -%}
{% if uses_receipts_by_hash %}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' unique_key = "' ~ config.get('unique_key') ~ '",\n' %}
{% set config_log = config_log ~ ' cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %}
{% set config_log = config_log ~ ' merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' post_hook = "' ~ post_hook ~ '",\n' %}
{% set config_log = config_log ~ ' incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' full_refresh = ' ~ full_refresh_type ~ ',\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}
@ -1,36 +0,0 @@
{% macro log_model_details(vars=false, params=false) %}
{%- if execute -%}
/*
DBT Model Config:
{{ model.config | tojson(indent=2) }}
*/
{% if vars is not false %}
{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( vars | tojson(indent=2), info=True) }}
{% endif %}
/*
Variables:
{{ vars | tojson(indent=2) }}
*/
{% endif %}
{% if params is not false %}
{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( params | tojson(indent=2), info=True) }}
{% endif %}
/*
Parameters:
{{ params | tojson(indent=2) }}
*/
{% endif %}
/*
Raw Code:
{{ model.raw_code }}
*/
{%- endif -%}
{% endmacro %}
@ -1,55 +0,0 @@
{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("START_UP_BLOCK: " ~ min_block, info=True) }}
{{ log("", info=True) }}
{{ log("=== API Details ===", info=True) }}
{{ log("NODE_URL: " ~ node_url, info=True) }}
{{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
{{ log("", info=True) }}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }}
{{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }}
{{ log("", info=True) }}
{{ log("=== RPC Details ===", info=True) }}
{{ log(model_name ~ ": {", info=True) }}
{{ log(" method: '" ~ method ~ "',", info=True) }}
{{ log(" method_params: " ~ method_params, info=True) }}
{{ log("}", info=True) }}
{{ log("", info=True) }}
{% set params_str = streamline_params | tojson %}
{% set params_formatted = params_str | replace('{', '{\n ') | replace('}', '\n }') | replace(', ', ',\n ') %}
{# Clean up the method_params formatting #}
{% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' post_hook = fsc_utils.if_data_call_function_v2(\n' %}
{% set config_log = config_log ~ ' func = "streamline.udf_bulk_rest_api_v2",\n' %}
{% set config_log = config_log ~ ' target = "' ~ this.schema ~ '.' ~ this.identifier ~ '",\n' %}
{% set config_log = config_log ~ ' params = ' ~ params_formatted ~ '\n' %}
{% set config_log = config_log ~ ' ),\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}
@ -1,47 +0,0 @@
{% macro set_default_variables_streamline(model_name, model_type) %}
{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%}
{%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%}
{%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%}
{%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%}
{%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime'
else 'ORDER BY partition_key ASC, block_number ASC' -%}
{%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'node_url': node_url,
'node_secret_path': node_secret_path,
'model_quantum_state': model_quantum_state,
'testing_limit': testing_limit,
'new_build': new_build,
'order_by_clause': order_by_clause,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}
{% macro set_default_variables_bronze(source_name, model_type) %}
{%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION',
"CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)")
-%}
{%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%}
{%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%}
{%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'partition_function': partition_function,
'partition_join_key': partition_join_key,
'block_number': block_number,
'balances': balances,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}
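The bronze default partition function pulls the block height from the fourth path segment instead of the sixth. A short worked example under an assumed path layout:
-- 'mainnet/blocks/v2/019000000_0.json' is an assumed path layout.
SELECT CAST(SPLIT_PART(SPLIT_PART('mainnet/blocks/v2/019000000_0.json', '/', 4), '_', 1) AS INTEGER) AS partition_key; -- 19000000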
@ -1,60 +0,0 @@
{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %}
{%- set rpc_config_details = {
"blocks_transactions": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)',
"exploded_key": ['result', 'result.transactions']
},
"receipts_by_hash": {
"method": 'eth_getTransactionReceipt',
"method_params": 'ARRAY_CONSTRUCT(tx_hash)'
},
"receipts": {
"method": 'eth_getBlockReceipts',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))',
"exploded_key": ['result'],
"lambdas": 2
},
"traces": {
"method": 'debug_traceBlockByNumber',
"method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))",
"exploded_key": ['result'],
"lambdas": 2
},
"confirm_blocks": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)'
}
} -%}
{%- set rpc_config = rpc_config_details[model_name.lower()] -%}
{%- set params = {
"external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()),
"sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"worker_batch_size": var(
(model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(),
(2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1))
),
"sql_source": (model_name ~ '_' ~ model_type).lower(),
"method": rpc_config['method'],
"method_params": rpc_config['method_params']
} -%}
{%- if rpc_config.get('exploded_key') is not none -%}
{%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%}
{%- endif -%}
{%- if rpc_config.get('lambdas') is not none -%}
{%- do params.update({"lambdas": rpc_config['lambdas']}) -%}
{%- endif -%}
{{ return(params) }}
{% endmacro %}
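With GLOBAL_BLOCKS_PER_HOUR set to 300 (as in dbt_project.yml above), multiplier = 1, and the traces config's two lambdas, the derived defaults work out as follows:
SELECT
2 * 300 * 1 AS sql_limit, -- 600
2 * 300 * 1 AS producer_batch_size, -- 600
FLOOR(2 * 300 * 1 / 2) AS worker_batch_size; -- 300, i.e. the batch split across 2 lambdas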
@ -1,7 +0,0 @@
{% macro grant_data_share_statement(table_name, resource_type) %}
{% if target.database == 'ETHEREUM' %}
GRANT SELECT ON {{ resource_type }} ETHEREUM.CORE.{{ table_name }} TO SHARE "FLIPSIDE_ETHEREUM";
{% else %}
select 1; -- hooks will error if they don't have valid SQL in them, this handles that!
{% endif %}
{% endmacro %}
@ -1,12 +0,0 @@
{% macro lookback() %}
{% if execute and is_incremental() %}
{% set query %}
SELECT
MAX(_inserted_timestamp) :: DATE - 3
FROM
{{ this }};
{% endset %}
{% set last_week = run_query(query).columns [0] [0] %}
{% do return(last_week) %}
{% endif %}
{% endmacro %}
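Despite the last_week variable name, the macro returns a three-day watermark: the latest _inserted_timestamp in the current model minus three days. The query it runs, with a hypothetical table standing in for {{ this }}:
SELECT MAX(_inserted_timestamp)::DATE - 3
FROM ethereum.silver.logs; -- assumed target table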
@ -1,32 +0,0 @@
{% macro sp_create_load_nft_metadata() %}
{% if var("UPDATE_UDFS_AND_SPS") %}
{% set sql %}
CREATE OR REPLACE PROCEDURE silver.sp_run_load_nft_metadata()
RETURNS variant
LANGUAGE SQL
AS
$$
DECLARE
RESULT VARCHAR;
row_cnt INTEGER;
BEGIN
row_cnt:= (
SELECT
COUNT(1)
FROM
silver.nft_metadata_api_requests
);
if (
row_cnt > 0
) THEN RESULT:= (
SELECT
silver.udf_load_nft_metadata()
);
ELSE RESULT:= NULL;
END if;
RETURN RESULT;
END;
$${% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}
@ -27,11 +27,11 @@
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.metadata :request :"data" :id :: INT,
PARSE_JSON(
TRY_TO_NUMBER(s.value :"BLOCK_NUMBER" :: STRING),
TRY_TO_NUMBER(s.metadata :request :"data" :id :: STRING),
TRY_TO_NUMBER(PARSE_JSON(
s.metadata :request :"data"
) :id :: INT
) :id :: STRING)
) AS block_number
{% endif %}
FROM
@ -48,8 +48,8 @@
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
TRY_TO_NUMBER(s.value :"BLOCK_NUMBER" :: STRING),
TRY_TO_NUMBER(s.value :"block_number" :: STRING)
)
{% endif %}
WHERE
@ -88,12 +88,12 @@ SELECT
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT,
s.metadata :request :"data" :id :: INT,
PARSE_JSON(
TRY_TO_NUMBER(s.value :"BLOCK_NUMBER" :: STRING),
TRY_TO_NUMBER(s.value :"block_number" :: STRING),
TRY_TO_NUMBER(s.metadata :request :"data" :id :: STRING),
TRY_TO_NUMBER(PARSE_JSON(
s.metadata :request :"data"
) :id :: INT
) :id :: STRING)
) AS block_number
{% endif %}
FROM
@ -110,8 +110,8 @@ FROM
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
TRY_TO_NUMBER(s.value :"BLOCK_NUMBER" :: STRING),
TRY_TO_NUMBER(s.value :"block_number" :: STRING)
)
{% endif %}
WHERE
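These hunks swap hard :: INT casts for TRY_TO_NUMBER, which yields NULL on non-numeric input instead of failing the whole query:
SELECT TRY_TO_NUMBER('19000000'); -- 19000000
SELECT TRY_TO_NUMBER('0xdeadbeef'); -- NULL, whereas '0xdeadbeef'::INT would raise an error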
@ -1,66 +0,0 @@
{% macro decode_traces_history(
start,
stop
) %}
WITH look_back AS (
SELECT
block_number
FROM
{{ ref("_24_hour_lookback") }}
)
SELECT
t.block_number,
t.tx_hash,
t.trace_index,
_call_id,
A.abi AS abi,
A.function_name AS function_name,
CASE
WHEN TYPE = 'DELEGATECALL' THEN from_address
ELSE to_address
END AS abi_address,
t.input AS input,
COALESCE(
t.output,
'0x'
) AS output
FROM
{{ ref("silver__traces") }}
t
INNER JOIN {{ ref("silver__complete_function_abis") }} A
ON A.parent_contract_address = abi_address
AND LEFT(
t.input,
10
) = LEFT(
A.function_signature,
10
)
AND t.block_number BETWEEN A.start_block
AND A.end_block
WHERE
(t.block_number BETWEEN {{ start }} AND {{ stop }})
and t.block_number < (
SELECT
block_number
FROM
look_back
)
AND t.block_number IS NOT NULL
AND _call_id NOT IN (
SELECT
_call_id
FROM
{{ ref("streamline__complete_decode_traces") }}
WHERE
(block_number BETWEEN {{ start }} AND {{ stop }})
and block_number < (
SELECT
block_number
FROM
look_back
))
{% endmacro %}
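The join matches traces to ABIs on the first ten characters of the input, i.e. '0x' plus the 4-byte function selector; the calldata below is illustrative:
SELECT LEFT('0xa9059cbb000000000000000000000000d8da6bf26964af9d7eed9e03e53415d37aa96045', 10);
-- '0xa9059cbb', the selector for transfer(address,uint256)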
@ -1,30 +0,0 @@
{% macro run_decoded_traces_history() %}
{% set blockchain = var('GLOBAL_PROD_DB_NAME','ethereum').lower() %}
{% set check_for_new_user_abis_query %}
select 1
from {{ ref('silver__user_verified_abis') }}
where _inserted_timestamp::date = sysdate()::date
{% endset %}
{% set results = run_query(check_for_new_user_abis_query) %}
{% if execute %}
{% set new_user_abis = results.columns[0].values()[0] %}
{% if new_user_abis %}
{% set invoke_workflow_query %}
SELECT
github_actions.workflow_dispatches(
'FlipsideCrypto',
'{{ blockchain }}' ~ '-models',
'dbt_run_streamline_decoded_traces_history.yml',
NULL
)
{% endset %}
{% do run_query(invoke_workflow_query) %}
{% endif %}
{% endif %}
{% endmacro %}
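For this repo, GLOBAL_PROD_DB_NAME renders to 'ethereum' (see dbt_project.yml above), so the dispatched call compiles to:
SELECT
github_actions.workflow_dispatches(
'FlipsideCrypto',
'ethereum-models',
'dbt_run_streamline_decoded_traces_history.yml',
NULL
);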
@ -1,26 +0,0 @@
|
||||
{% macro create_sp_get_beacon_blocks_history() %}
|
||||
{% set sql %}
|
||||
CREATE
|
||||
OR REPLACE PROCEDURE streamline.sp_get_beacon_blocks_history() returns variant LANGUAGE SQL AS $$
|
||||
DECLARE
|
||||
RESULT variant;
|
||||
row_cnt INTEGER;
|
||||
BEGIN
|
||||
row_cnt:= (
|
||||
SELECT
|
||||
COUNT(1)
|
||||
FROM
|
||||
{{ ref('streamline__beacon_blocks_history') }}
|
||||
);
|
||||
if (
|
||||
row_cnt > 0
|
||||
) THEN RESULT:= (
|
||||
SELECT
|
||||
streamline.udf_get_beacon_blocks()
|
||||
);
|
||||
ELSE RESULT:= NULL;
|
||||
END if;
|
||||
RETURN RESULT;
|
||||
END;$$ {% endset %}
|
||||
{% do run_query(sql) %}
|
||||
{% endmacro %}
|
||||
@ -1,26 +0,0 @@
|
||||
{% macro create_sp_get_beacon_blocks_realtime() %}
|
||||
{% set sql %}
|
||||
CREATE
|
||||
OR REPLACE PROCEDURE streamline.sp_get_beacon_blocks_realtime() returns variant LANGUAGE SQL AS $$
|
||||
DECLARE
|
||||
RESULT variant;
|
||||
row_cnt INTEGER;
|
||||
BEGIN
|
||||
row_cnt:= (
|
||||
SELECT
|
||||
COUNT(1)
|
||||
FROM
|
||||
{{ ref('streamline__beacon_blocks_realtime') }}
|
||||
);
|
||||
if (
|
||||
row_cnt > 0
|
||||
) THEN RESULT:= (
|
||||
SELECT
|
||||
streamline.udf_get_beacon_blocks()
|
||||
);
|
||||
ELSE RESULT:= NULL;
|
||||
END if;
|
||||
RETURN RESULT;
|
||||
END;$$ {% endset %}
|
||||
{% do run_query(sql) %}
|
||||
{% endmacro %}
|
||||
@ -1,26 +0,0 @@
|
||||
{% macro create_sp_get_blocks_history() %}
|
||||
{% set sql %}
|
||||
CREATE
|
||||
OR REPLACE PROCEDURE streamline.sp_get_blocks_history() returns variant LANGUAGE SQL AS $$
|
||||
DECLARE
|
||||
RESULT variant;
|
||||
row_cnt INTEGER;
|
||||
BEGIN
|
||||
row_cnt:= (
|
||||
SELECT
|
||||
COUNT(1)
|
||||
FROM
|
||||
{{ ref('streamline__blocks_history') }}
|
||||
);
|
||||
if (
|
||||
row_cnt > 0
|
||||
) THEN RESULT:= (
|
||||
SELECT
|
||||
streamline.udf_get_blocks()
|
||||
);
|
||||
ELSE RESULT:= NULL;
|
||||
END if;
|
||||
RETURN RESULT;
|
||||
END;$$ {% endset %}
|
||||
{% do run_query(sql) %}
|
||||
{% endmacro %}
|
||||
@ -1,26 +0,0 @@
|
||||
{% macro create_sp_get_blocks_realtime() %}
|
||||
{% set sql %}
|
||||
CREATE
|
||||
OR REPLACE PROCEDURE streamline.sp_get_blocks_realtime() returns variant LANGUAGE SQL AS $$
|
||||
DECLARE
|
||||
RESULT variant;
|
||||
row_cnt INTEGER;
|
||||
BEGIN
|
||||
row_cnt:= (
|
||||
SELECT
|
||||
COUNT(1)
|
||||
FROM
|
||||
{{ ref('streamline__blocks_realtime') }}
|
||||
);
|
||||
if (
|
||||
row_cnt > 0
|
||||
) THEN RESULT:= (
|
||||
SELECT
|
||||
streamline.udf_get_blocks()
|
||||
);
|
||||
ELSE RESULT:= NULL;
|
||||
END if;
|
||||
RETURN RESULT;
|
||||
END;$$ {% endset %}
|
||||
{% do run_query(sql) %}
|
||||
{% endmacro %}
|
||||
@ -1,26 +0,0 @@
{% macro create_sp_get_contract_abis_history() %}
    {% set sql %}
    CREATE
        OR REPLACE PROCEDURE streamline.sp_get_contract_abis_history() returns variant LANGUAGE SQL AS $$
    DECLARE
        RESULT variant;
        row_cnt INTEGER;
    BEGIN
        row_cnt := (
            SELECT
                COUNT(1)
            FROM
                {{ ref('streamline__contract_abis_history') }}
        );
        IF (
            row_cnt > 0
        ) THEN RESULT := (
            SELECT
                streamline.udf_get_contract_abis()
        );
        ELSE RESULT := NULL;
        END IF;
        RETURN RESULT;
    END;$$ {% endset %}
    {% do run_query(sql) %}
{% endmacro %}
@ -1,26 +0,0 @@
{% macro create_sp_get_contract_abis_realtime() %}
    {% set sql %}
    CREATE
        OR REPLACE PROCEDURE streamline.sp_get_contract_abis_realtime() returns variant LANGUAGE SQL AS $$
    DECLARE
        RESULT variant;
        row_cnt INTEGER;
    BEGIN
        row_cnt := (
            SELECT
                COUNT(1)
            FROM
                {{ ref('streamline__contract_abis_realtime') }}
        );
        IF (
            row_cnt > 0
        ) THEN RESULT := (
            SELECT
                streamline.udf_get_contract_abis()
        );
        ELSE RESULT := NULL;
        END IF;
        RETURN RESULT;
    END;$$ {% endset %}
    {% do run_query(sql) %}
{% endmacro %}
@ -1,26 +0,0 @@
{% macro create_sp_get_contract_reads_history() %}
    {% set sql %}
    CREATE
        OR REPLACE PROCEDURE streamline.sp_get_contract_reads_history() returns variant LANGUAGE SQL AS $$
    DECLARE
        RESULT variant;
        row_cnt INTEGER;
    BEGIN
        row_cnt := (
            SELECT
                COUNT(1)
            FROM
                {{ ref('streamline__contract_reads_history') }}
        );
        IF (
            row_cnt > 0
        ) THEN RESULT := (
            SELECT
                streamline.udf_get_contract_reads()
        );
        ELSE RESULT := NULL;
        END IF;
        RETURN RESULT;
    END;$$ {% endset %}
    {% do run_query(sql) %}
{% endmacro %}
@ -1,26 +0,0 @@
{% macro create_sp_get_contract_reads_realtime() %}
    {% set sql %}
    CREATE
        OR REPLACE PROCEDURE streamline.sp_get_contract_reads_realtime() returns variant LANGUAGE SQL AS $$
    DECLARE
        RESULT variant;
        row_cnt INTEGER;
    BEGIN
        row_cnt := (
            SELECT
                COUNT(1)
            FROM
                {{ ref('streamline__contract_reads_realtime') }}
        );
        IF (
            row_cnt > 0
        ) THEN RESULT := (
            SELECT
                streamline.udf_get_contract_reads()
        );
        ELSE RESULT := NULL;
        END IF;
        RETURN RESULT;
    END;$$ {% endset %}
    {% do run_query(sql) %}
{% endmacro %}
@ -1,26 +0,0 @@
{% macro create_sp_get_transactions_history() %}
    {% set sql %}
    CREATE
        OR REPLACE PROCEDURE streamline.sp_get_transactions_history() returns variant LANGUAGE SQL AS $$
    DECLARE
        RESULT variant;
        row_cnt INTEGER;
    BEGIN
        row_cnt := (
            SELECT
                COUNT(1)
            FROM
                {{ ref('streamline__transactions_history') }}
        );
        IF (
            row_cnt > 0
        ) THEN RESULT := (
            SELECT
                streamline.udf_get_transactions()
        );
        ELSE RESULT := NULL;
        END IF;
        RETURN RESULT;
    END;$$ {% endset %}
    {% do run_query(sql) %}
{% endmacro %}
@ -1,26 +0,0 @@
{% macro create_sp_get_transactions_realtime() %}
    {% set sql %}
    CREATE
        OR REPLACE PROCEDURE streamline.sp_get_transactions_realtime() returns variant LANGUAGE SQL AS $$
    DECLARE
        RESULT variant;
        row_cnt INTEGER;
    BEGIN
        row_cnt := (
            SELECT
                COUNT(1)
            FROM
                {{ ref('streamline__transactions_realtime') }}
        );
        IF (
            row_cnt > 0
        ) THEN RESULT := (
            SELECT
                streamline.udf_get_transactions()
        );
        ELSE RESULT := NULL;
        END IF;
        RETURN RESULT;
    END;$$ {% endset %}
    {% do run_query(sql) %}
{% endmacro %}
@ -1,203 +0,0 @@
{% macro create_udf_get_token_balances() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_get_token_balances(
            json variant
        ) returns text api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_get_token_balances'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_get_token_balances'
        {%- endif %};
{% endmacro %}

{% macro create_udf_get_eth_balances() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_get_eth_balances(
            json variant
        ) returns text api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_get_eth_balances'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_get_eth_balances'
        {%- endif %};
{% endmacro %}

{% macro create_udf_get_reads() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_get_reads(
            json variant
        ) returns text api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_get_reads'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_get_reads'
        {%- endif %};
{% endmacro %}

{% macro create_udf_get_contract_abis() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_get_contract_abis() returns text api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_get_contract_abis'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_get_contract_abis'
        {%- endif %};
{% endmacro %}

{% macro create_udf_get_blocks() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_get_blocks(
            json variant
        ) returns text api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_get_blocks'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_get_blocks'
        {%- endif %};
{% endmacro %}

{% macro create_udf_get_transactions() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_get_transactions(
            json variant
        ) returns text api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_get_transactions'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_get_transactions'
        {%- endif %};
{% endmacro %}

{% macro create_udf_get_beacon_blocks() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_get_beacon_blocks(
            json variant
        ) returns text api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_get_beacon_blocks'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_get_beacon_blocks'
        {%- endif %};
{% endmacro %}

{% macro create_udf_get_chainhead() %}
    CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_get_chainhead() returns variant api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
        'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
    {% else %}
        'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
    {%- endif %};
{% endmacro %}

{% macro create_udf_get_beacon_chainhead() %}
    CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_get_beacon_chainhead() returns variant api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
        'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/get_beacon_chainhead'
    {% else %}
        'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/get_beacon_chainhead'
    {%- endif %};
{% endmacro %}

{% macro create_udf_call_eth_node() %}
    CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_json_rpc_call(
        DATA ARRAY
    ) returns variant api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
        'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/call_eth_node'
    {% else %}
        'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/call_eth_node'
    {%- endif %};
{% endmacro %}

{% macro create_udf_call_node() %}
    CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_call_node(
        DATA ARRAY
    ) returns variant api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
        'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/call_node'
    {% else %}
        'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/call_node'
    {%- endif %};
{% endmacro %}

{% macro create_udf_call_read_batching() %}
    CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_json_rpc_read_calls(
        node_url VARCHAR,
        headers OBJECT,
        calls ARRAY
    ) returns variant api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
        'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/call_read_batching'
    {% else %}
        'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/call_read_batching'
    {%- endif %};
{% endmacro %}

{% macro create_udf_api() %}
    CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_api(
        method VARCHAR,
        url VARCHAR,
        headers OBJECT,
        DATA OBJECT
    ) returns variant api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
        'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/udf_api'
    {% else %}
        'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/udf_api'
    {%- endif %};
{% endmacro %}

{% macro create_udf_decode_array_string() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
            abi ARRAY,
            DATA STRING
        ) returns ARRAY api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/decode_function'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/decode_function'
        {%- endif %};
{% endmacro %}

{% macro create_udf_decode_array_object() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
            abi ARRAY,
            DATA OBJECT
        ) returns ARRAY api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/decode_log'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/decode_log'
        {%- endif %};
{% endmacro %}

{% macro create_udf_bulk_decode_logs() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_decode_logs(
            json OBJECT
        ) returns ARRAY api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_decode_logs'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_decode_logs'
        {%- endif %};
{% endmacro %}

{% macro create_udf_bulk_decode_traces() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_decode_traces(
            json OBJECT
        ) returns ARRAY api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_decode_traces'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_decode_traces'
        {%- endif %};
{% endmacro %}

{% macro create_udf_rest_api() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_rest_api(
            json OBJECT
        ) returns ARRAY api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_get_rest_api'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_get_rest_api'
        {%- endif %};
{% endmacro %}

{% macro create_udf_json_rpc() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION streamline.udf_json_rpc(
            json OBJECT
        ) returns ARRAY api_integration = aws_ethereum_api AS {% if target.name == "prod" %}
            'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_get_json_rpc'
        {% else %}
            'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/bulk_get_json_rpc'
        {%- endif %};
{% endmacro %}
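Every external-function macro in the file above switches its API Gateway endpoint on `target.name`, so one macro serves both environments. For example, rendering `create_udf_get_blocks()` against a prod target would emit roughly the following DDL (taken directly from the branches above):

CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_get_blocks(
    json variant
) RETURNS text
api_integration = aws_ethereum_api
AS 'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/bulk_get_blocks';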
@ -1,8 +0,0 @@
{% macro create_udf_load_nft_metadata() %}
    CREATE
        OR REPLACE EXTERNAL FUNCTION silver.udf_load_nft_metadata() returns text api_integration = aws_nft_metadata_api_dev AS {% if target.name == "prod" -%}
            'https://6gh4ncj0ig.execute-api.us-east-1.amazonaws.com/prod/bulk_load_nft_metadata/ethereum'
        {% else %}
            'https://rtcsra1z35.execute-api.us-east-1.amazonaws.com/dev/bulk_load_nft_metadata/ethereum'
        {%- endif %};
{% endmacro %}
@ -1,78 +0,0 @@
{% macro if_data_call_function(
        func,
        target
    ) %}
    {% if var(
        "STREAMLINE_INVOKE_STREAMS"
    ) %}
        {% if execute %}
            {{ log(
                "Running macro `if_data_call_function`: Calling udf " ~ func ~ " on " ~ target,
                True
            ) }}
        {% endif %}
        SELECT
            {{ func }}
        WHERE
            EXISTS(
                SELECT
                    1
                FROM
                    {{ target }}
                LIMIT
                    1
            )
    {% else %}
        {% if execute %}
            {{ log(
                "Running macro `if_data_call_function`: NOOP",
                False
            ) }}
        {% endif %}
        SELECT
            NULL
    {% endif %}
{% endmacro %}

{% macro if_data_call_wait() %}
    {% if var(
        "STREAMLINE_INVOKE_STREAMS"
    ) %}
        {% set query %}
        SELECT
            1
        WHERE
            EXISTS(
                SELECT
                    1
                FROM
                    {{ model.schema ~ "." ~ model.alias }}
                LIMIT
                    1
            ) {% endset %}
        {% if execute %}
            {% set results = run_query(
                query
            ) %}
            {% if results %}
                {{ log(
                    "Waiting...",
                    info = True
                ) }}

                {% set wait_query %}
                SELECT
                    system$wait(
                        {{ var(
                            "WAIT",
                            600
                        ) }}
                    ) {% endset %}
                {% do run_query(wait_query) %}
            {% else %}
                SELECT
                    NULL;
            {% endif %}
        {% endif %}
    {% endif %}
{% endmacro %}
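These two deleted helpers were the glue between streamline models and the external functions: `if_data_call_function` emits the UDF call only when the target relation has at least one row, and `if_data_call_wait` sleeps via `system$wait` until downstream data lands. A hypothetical wiring sketch, with the model, function, and table names here purely illustrative, not taken from the repo:

-- hypothetical usage sketch inside a streamline model file
{{ config(
    materialized = 'view',
    post_hook = if_data_call_function(
        func = "streamline.udf_get_blocks(object_construct('sql_limit', 1000))",
        target = "{{ this.schema }}.{{ this.identifier }}"
    )
) }}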
@ -31,13 +31,10 @@ There is more information on how to use dbt docs in the last section of this doc
**Fact Tables:**

- [fact_blocks](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.core__fact_blocks)
- [fact_contract_reads](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.core__fact_contract_reads)
- [fact_decoded_event_logs](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.core__fact_decoded_event_logs)
- [fact_decoded_traces](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.core__fact_decoded_traces)
- [fact_eth_balances](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.core__fact_eth_balances)
- [fact_event_logs](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.core__fact_event_logs)
- [fact_token_balances](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.core__fact_token_balances)
- [fact_token_transfers](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.core__fact_token_transfers)
- [fact_traces](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.core__fact_traces)
- [fact_transactions](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.core__fact_transactions)

@ -74,7 +71,6 @@ There is more information on how to use dbt docs in the last section of this doc
### NFT Tables (ethereum.nft)

- [dim_nft_collection_metadata](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.nft__dim_nft_collection_metadata)
- [ez_nft_mints](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.nft__ez_nft_mints)
- [ez_nft_sales](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.nft__ez_nft_sales)
- [ez_nft_transfers](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.nft__ez_nft_transfers)
- [ez_lending_liquidations](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.nft__ez_lending_liquidations)
@ -87,18 +83,6 @@ There is more information on how to use dbt docs in the last section of this doc

- [ez_core_metrics_hourly](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.stats__ez_core_metrics_hourly)

### Aave Tables (ethereum.aave)

- [ez_borrows](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.aave__ez_borrows)
- [ez_deposits](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.aave__ez_deposits)
- [ez_flashloans](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.aave__ez_flashloans)
- [ez_liquidations](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.aave__ez_liquidations)
- [ez_market_stats](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.aave__ez_market_stats)
- [ez_proposals](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.aave__ez_proposals)
- [ez_repayments](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.aave__ez_repayments)
- [ez_votes](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.aave__ez_votes)
- [ez_withdraws](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.aave__ez_withdraws)

### Beacon Chain Tables (ethereum.beacon_chain)

- [fact_blocks](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.beacon_chain__fact_blocks)
@ -119,17 +103,6 @@ There is more information on how to use dbt docs in the last section of this doc
- [fact_renewals](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.ens__fact_renewals)
- [fact_transfers](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.ens__fact_transfers)

### Maker Tables (ethereum.maker)

- [ez_delegations](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.maker__ez_delegations)
- [ez_deposits](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.maker__ez_deposits)
- [ez_flash_loans](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.maker__ez_flash_loans)
- [ez_governance_votes](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.maker__ez_governance_votes)
- [ez_liquidations](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.maker__ez_liquidations)
- [ez_repayments](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.maker__ez_repayments)
- [ez_vault_creation](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.maker__ez_vault_creation)
- [ez_withdrawals](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.maker__ez_withdrawals)

### Uniswap v3 Tables (ethereum.uniswapv3)

- [ez_lp_actions](https://flipsidecrypto.github.io/ethereum-models/#!/model/model.ethereum_models.uniswapv3__ez_lp_actions)
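For orientation on the gold tables listed above, a minimal query sketch; the table is real, but the column choice, ordering, and limit are illustrative (column names are taken from the schema docs later in this diff):

SELECT
    block_number,
    block_timestamp,
    tx_hash
FROM ethereum.core.fact_transactions
ORDER BY block_timestamp DESC
LIMIT 10;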
@ -2,7 +2,8 @@
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEFI' } } }
    meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEFI' } } },
    tags = ['gold','beacon','ez']
) }}

SELECT
@ -4,11 +4,11 @@ models:
    description: This convenience table contains information about the deposits made to the beacon chain, alongside address labels for analysis purposes. Deposit activity in this table is derived from the `DepositEvent` on the `BeaconDepositContract - 0x00000000219ab540356cbb839cbe05303d7705fa` in `ethereum.core.fact_event_logs`. For more info, please visit [The Ethereum Organization](https://ethereum.org/en/developers/docs/consensus-mechanisms/pos/).
    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("eth_block_number") }}'
        description: '{{ doc("evm_block_number") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("eth_block_timestamp") }}'
        description: '{{ doc("evm_block_timestamp") }}'
      - name: TX_HASH
        description: '{{ doc("eth_tx_hash") }}'
        description: '{{ doc("evm_tx_hash") }}'
      - name: EVENT_INDEX
        description: The index of the deposit event within the transaction.
      - name: DEPOSIT_AMOUNT
@ -40,8 +40,8 @@ models:
      - name: DEPOSIT_INDEX
        description: The index of the deposit.
      - name: EZ_DEPOSITS_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
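The description changes in this hunk swap shared `eth_*` doc blocks for chain-agnostic `evm_*` ones, which is the core of the migration across all the schema files below. For reference, a dbt doc block of this kind is defined roughly as follows; the block name matches the diff, but the wording inside is assumed, not taken from the repo:

{% docs evm_block_number %}
The block height: the number of blocks that precede this block on the chain.
{% enddocs %}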
@ -2,7 +2,8 @@
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEFI' } } }
    meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEFI' } } },
    tags = ['gold','beacon','ez']
) }}

SELECT
@ -4,11 +4,11 @@ models:
    description: This convenience table contains information about the withdrawals made from the beacon chain, alongside address labels for analysis purposes. Withdrawal activity in this table is derived directly from the `withdrawals` object in the `eth_getBlockByNumber` Ethereum JSON-RPC Method, where block_number represents the block on Ethereum Mainnet and slot_number corresponds to the Beacon Chain slot that the withdrawal was executed in. For more info, please visit [The Ethereum Organization](https://ethereum.org/en/developers/docs/consensus-mechanisms/pos/).
    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("eth_block_number") }}'
        description: '{{ doc("evm_block_number") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("eth_block_timestamp") }}'
        description: '{{ doc("evm_block_timestamp") }}'
      - name: BLOCK_HASH
        description: '{{ doc("eth_blocks_hash") }}'
        description: '{{ doc("evm_blocks_hash") }}'
      - name: WITHDRAWAL_AMOUNT
        description: The amount of the withdrawal.
      - name: WITHDRAWAL_ADDRESS
@ -26,12 +26,12 @@ models:
      - name: VALIDATOR_INDEX
        description: The index of the validator associated with the withdrawal.
      - name: SLOT_NUMBER
        description: '{{ doc("eth_slot_number") }}'
        description: '{{ doc("evm_slot_number") }}'
      - name: EPOCH_NUMBER
        description: '{{ doc("eth_epoch_number") }}'
        description: '{{ doc("evm_epoch_number") }}'
      - name: EZ_WITHDRAWALS_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
@ -1,7 +1,8 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true }
    "columns": true },
    tags = ['gold','beacon']
) }}

SELECT
@ -1,15 +1,15 @@
version: 2
models:
  - name: beacon_chain__fact_attestations
    description: '{{ doc("attestations_table_doc") }}'
    description: '{{ doc("evm_attestations_table_doc") }}'

    columns:
      - name: SLOT_NUMBER
        description: '{{ doc("eth_slot_number") }}'
        description: '{{ doc("evm_slot_number") }}'
      - name: SLOT_TIMESTAMP
        description: '{{ doc("eth_slot_timestamp") }}'
        description: '{{ doc("evm_slot_timestamp") }}'
      - name: EPOCH_NUMBER
        description: '{{ doc("eth_epoch_number") }}'
        description: '{{ doc("evm_epoch_number") }}'
      - name: ATTESTATION_SLOT
        description: 'The slot number in which the validator is attesting on'
      - name: ATTESTATION_INDEX
@ -29,8 +29,8 @@ models:
      - name: ATTESTATION_SIGNATURE
        description: 'A BLS signature that aggregates the signatures of individual validators'
      - name: FACT_ATTESTATIONS_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
@ -1,7 +1,8 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true }
    "columns": true },
    tags = ['gold','beacon']
) }}

SELECT
@ -27,8 +27,8 @@ models:
      - name: SIGNATURE
        description: The signature hash of the message object.
      - name: FACT_BLOB_SIDECAR_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
@ -1,7 +1,8 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true }
    "columns": true },
    tags = ['gold','beacon']
) }}

SELECT
@ -1,15 +1,15 @@
version: 2
models:
  - name: beacon_chain__fact_blocks
    description: '{{ doc("beacon_blocks_table_doc") }}'
    description: '{{ doc("evm_beacon_blocks_table_doc") }}'

    columns:
      - name: SLOT_NUMBER
        description: '{{ doc("eth_slot_number") }}'
        description: '{{ doc("evm_slot_number") }}'
      - name: EPOCH_NUMBER
        description: '{{ doc("eth_epoch_number") }}'
        description: '{{ doc("evm_epoch_number") }}'
      - name: SLOT_TIMESTAMP
        description: '{{ doc("eth_slot_timestamp") }}'
        description: '{{ doc("evm_slot_timestamp") }}'
      - name: PROPOSER_INDEX
        description: 'Index of the validator that proposed the block for the slot.'
      - name: PARENT_ROOT
@ -49,10 +49,10 @@ models:
      - name: EXCESS_BLOB_GAS
        description: A running total of blob gas consumed in excess of the target, prior to the block. This is used to set blob gas pricing.
      - name: FACT_BLOCKS_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
      - name: BLOCK_INCLUDED
        description: 'Boolean flag that indicates whether a block was included for a specific slot, TRUE, or if it is missing/skipped/forked, FALSE.'
@ -1,7 +1,8 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true }
    "columns": true },
    tags = ['gold','beacon']
) }}

SELECT
@ -1,15 +1,15 @@
version: 2
models:
  - name: beacon_chain__fact_deposits
    description: '{{ doc("beacon_deposits_table_doc") }}'
    description: '{{ doc("evm_beacon_deposits_table_doc") }}'

    columns:
      - name: SLOT_NUMBER
        description: '{{ doc("eth_slot_number") }}'
        description: '{{ doc("evm_slot_number") }}'
      - name: SLOT_TIMESTAMP
        description: '{{ doc("eth_slot_timestamp") }}'
        description: '{{ doc("evm_slot_timestamp") }}'
      - name: EPOCH_NUMBER
        description: '{{ doc("eth_epoch_number") }}'
        description: '{{ doc("evm_epoch_number") }}'
      - name: DEPOSIT_AMOUNT
        description: 'Decimal adjusted Ether deposit amount.'
      - name: PUBKEY
@ -21,8 +21,8 @@ models:
      - name: PROOFS
        description: 'The merkle path to the deposit root. In other words, the merkle proof against the current state.eth1_data.root in the BeaconState. Note that the + 1 in the vector length is due to the SSZ length mixed into the root.'
      - name: FACT_DEPOSITS_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
@ -1,7 +1,8 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true }
    "columns": true },
    tags = ['gold','beacon']
) }}

SELECT
@ -1,13 +1,13 @@
version: 2
models:
  - name: beacon_chain__fact_validator_balances
    description: '{{ doc("beacon_validator_balances_table_doc") }}'
    description: '{{ doc("evm_beacon_validator_balances_table_doc") }}'

    columns:
      - name: BLOCK_NUMBER
        description: Deprecating soon! The name of this column will be replaced with slot_number. The values remain the same and are currently accurate, representative of the Beacon Chain Slot Number. Please migrate queries to the new column by 11/13/23.
      - name: SLOT_NUMBER
        description: '{{ doc("eth_slot_number") }}'
        description: '{{ doc("evm_slot_number") }}'
      - name: STATE_ID
        description: 'The hash-tree-root of the BeaconState.'
      - name: INDEX
@ -15,8 +15,8 @@ models:
      - name: BALANCE
        description: 'Balance of Validator'
      - name: FACT_VALIDATOR_BALANCES_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
@ -1,7 +1,8 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true }
    "columns": true },
    tags = ['gold','beacon']
) }}

SELECT
@ -1,13 +1,13 @@
version: 2
models:
  - name: beacon_chain__fact_validators
    description: '{{ doc("beacon_validators_doc") }}'
    description: '{{ doc("evm_beacon_validators_doc") }}'

    columns:
      - name: BLOCK_NUMBER
        description: Deprecating soon! The name of this column will be replaced with slot_number. The values remain the same and are currently accurate, representative of the Beacon Chain Slot Number. Please migrate queries to the new column by 11/13/23.
      - name: SLOT_NUMBER
        description: '{{ doc("eth_slot_number") }}'
        description: '{{ doc("evm_slot_number") }}'
      - name: STATE_ID
        description: 'The hash-tree-root of the BeaconState.'
      - name: INDEX
@ -19,11 +19,11 @@ models:
      - name: ACTIVATION_ELIGIBILITY_EPOCH
        description: 'Refers to pending validators. The deposit has been recognized by the ETH2 chain at the timestamp of "Eligible for activation". If there is a queue of pending validators, an estimated timestamp for activation is calculated'
      - name: ACTIVATION_EPOCH
        description: '{{ doc("eth_epoch_number") }}'
        description: '{{ doc("evm_epoch_number") }}'
      - name: EFFECTIVE_BALANCE
        description: 'The effective balance represents a value calculated by the current balance. It is used to determine the size of a reward or penalty a validator receives. The effective balance can never be higher than 32 ETH.'
      - name: EXIT_EPOCH
        description: '{{ doc("eth_epoch_number") }}'
        description: '{{ doc("evm_epoch_number") }}'
      - name: PUBKEY
        description: 'Validator public key.'
      - name: SLASHED
@ -35,8 +35,8 @@ models:
      - name: VALIDATOR_DETAILS
        description: 'Information about the validator'
      - name: FACT_VALIDATORS_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
@ -1,7 +1,8 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true }
    "columns": true },
    tags = ['gold','beacon']
) }}

SELECT
@ -5,11 +5,11 @@ models:

    columns:
      - name: SLOT_NUMBER
        description: '{{ doc("eth_slot_number") }}'
        description: '{{ doc("evm_slot_number") }}'
      - name: SLOT_TIMESTAMP
        description: '{{ doc("eth_slot_timestamp") }}'
        description: '{{ doc("evm_slot_timestamp") }}'
      - name: EPOCH_NUMBER
        description: '{{ doc("eth_epoch_number") }}'
        description: '{{ doc("evm_epoch_number") }}'
      - name: WITHDRAWAL_AMOUNT
        description: The amount of ETH that was withdrawn from the beacon chain.
      - name: WITHDRAWAL_ADDRESS
@ -19,8 +19,8 @@ models:
      - name: VALIDATOR_INDEX
        description: The index of the validator that made the withdrawal.
      - name: FACT_WITHDRAWALS_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
@ -4,7 +4,7 @@
    cluster_by = ['slot_timestamp::date'],
    merge_exclude_columns = ["inserted_timestamp"],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(aggregation_bits,beacon_block_root,source_root,target_root,attestation_signature)",
    tags = ['beacon']
    tags = ['silver','beacon']
) }}

SELECT
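When this model builds, dbt renders `{{ this }}` in the post_hook to the model's fully qualified relation, so the hook executes roughly the following statement; the database, schema, and table names here are assumed for illustration:

ALTER TABLE ethereum.silver.beacon_attestations ADD SEARCH OPTIMIZATION ON EQUALITY(
    aggregation_bits,
    beacon_block_root,
    source_root,
    target_root,
    attestation_signature
);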
@ -6,7 +6,7 @@
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(slot_number,parent_root,state_root,randao_reveal,graffiti,eth1_block_hash,eth1_deposit_root,signature,block_included)",
    incremental_predicates = ["dynamic_range", "slot_number"],
    full_refresh = false,
    tags = ['beacon']
    tags = ['silver','beacon']
) }}

SELECT
@ -4,7 +4,7 @@
    cluster_by = ['slot_timestamp::date'],
    merge_exclude_columns = ["inserted_timestamp"],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(pubkey,signature,withdrawal_credentials)",
    tags = ['beacon']
    tags = ['silver','beacon']
) }}

WITH beacon_blocks AS (
@ -6,7 +6,7 @@
    cluster_by = "ROUND(block_number, -3)",
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(id, state_id, pubkey, slashed, withdrawal_credentials)",
    incremental_predicates = ["dynamic_range", "block_number"],
    tags = ['beacon']
    tags = ['silver','beacon']
) }}

SELECT
@ -4,7 +4,7 @@
    cluster_by = ['slot_timestamp::date'],
    merge_exclude_columns = ["inserted_timestamp"],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(address)",
    tags = ['beacon']
    tags = ['silver','beacon']
) }}

SELECT
@ -5,7 +5,7 @@
    cluster_by = "ROUND(slot_number, -3)",
    merge_exclude_columns = ["inserted_timestamp"],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(blob,kzg_commitment,kzg_proof,body_root,parent_root,state_root,signature)",
    tags = ['beacon']
    tags = ['silver','beacon']
) }}

WITH old_base AS (
@ -4,7 +4,7 @@
    unique_key = "block_number",
    cluster_by = "block_timestamp::date",
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_hash, depositor, deposit_address, platform_address, contract_address, pubkey, withdrawal_credentials, withdrawal_type, withdrawal_address, signature), SUBSTRING(depositor, deposit_address, platform_address, withdrawal_type)",
    tags = ['beacon','reorg']
    tags = ['silver','beacon']
) }}

WITH deposit_evt AS (
@ -4,7 +4,7 @@
    cluster_by = "block_timestamp::date",
    merge_exclude_columns = ["inserted_timestamp"],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(block_hash,withdrawal_address,withdrawals_root), SUBSTRING(withdrawal_address)",
    tags = ['beacon']
    tags = ['silver','beacon']
) }}

WITH withdrawal_blocks AS (
Some files were not shown because too many files have changed in this diff.