Mirror of https://github.com/FlipsideCrypto/avalanche-models.git, synced 2026-02-06 11:56:43 +00:00

Commit 3c4ba75812 (parent 376f8d33d0)

AN-5993/avax-migration (#326)

* initial set up
* macros and workflows
* wh
* defi and nft tags
* sl
* docs
* docs
* package
* package
* v63
* exclude curated
* remove
* v64
.github/workflows/dbt_alter_all_gha_tasks.yml (vendored, new file, +27)

@@ -0,0 +1,27 @@
name: dbt_alter_all_gha_tasks
run-name: dbt_alter_all_gha_tasks

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      task_action:
        type: choice
        description: Action to perform on all tasks
        required: true
        options:
          - RESUME
          - SUSPEND
        default: RESUME

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_all_gha_tasks.yml@pre-release/v4-beta
    with:
      task_action: ${{ inputs.task_action }}
      target: prod
    secrets: inherit
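
Reviewer note (illustrative only, not part of the diff): a workflow_dispatch trigger like the one above can be invoked from the GitHub CLI, passing the choice input by name. The ref and input value below are assumptions for the sketch:

    $ gh workflow run dbt_alter_all_gha_tasks.yml --ref main -f task_action=SUSPEND

The single task_action input appears to be what lets one dispatch suspend or resume every scheduled task at once via the shared template.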
.github/workflows/dbt_alter_gha_task.yml (vendored, deleted, -48)

@@ -1,48 +0,0 @@
name: dbt_alter_gha_task
run-name: dbt_alter_gha_task

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      workflow_name:
        type: string
        description: Name of the workflow to perform the action on, no .yml extension
        required: true
      task_action:
        type: choice
        description: Action to perform
        required: true
        options:
          - SUSPEND
          - RESUME
        default: SUSPEND

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
    with:
      workflow_name: |
        ${{ inputs.workflow_name }}
      task_action: |
        ${{ inputs.task_action }}
      environment: workflow_prod
    secrets: inherit
.github/workflows/dbt_alter_gha_tasks.yml (vendored, new file, +32)

@@ -0,0 +1,32 @@
name: dbt_alter_gha_tasks
run-name: dbt_alter_gha_tasks

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      workflow_name:
        type: string
        description: Name of the workflow to perform the action on, no .yml extension
        required: true
      task_action:
        type: choice
        description: Action to perform
        required: true
        options:
          - SUSPEND
          - RESUME
        default: SUSPEND

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_gha_tasks.yml@pre-release/v4-beta
    with:
      workflow_name: ${{ inputs.workflow_name }}
      task_action: ${{ inputs.task_action }}
      target: prod
    secrets: inherit
.github/workflows/dbt_deploy_new_workflows.yml (vendored, new file, +19)

@@ -0,0 +1,19 @@
name: dbt_deploy_new_workflows
run-name: dbt_deploy_new_workflows

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Deploy New Github Actions
      command: |
        make deploy_new_gha_tasks DBT_TARGET=prod
    secrets: inherit
.github/workflows/dbt_docs_update.yml (vendored, modified)

@@ -5,77 +5,10 @@ on:
    branches:
      - "main"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: refresh ddl for datashare
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
      - name: checkout docs branch
        run: |
          git checkout -B docs origin/main
      - name: generate dbt docs
        run: dbt docs generate -t prod

      - name: move files to docs directory
        run: |
          mkdir -p ./docs
          cp target/{catalog.json,manifest.json,index.html} docs/
      - name: clean up target directory
        run: dbt clean

      - name: check for changes
        run: git status

      - name: stage changed files
        run: git add .

      - name: commit changed files
        run: |
          git config user.email "abc@xyz"
          git config user.name "github-actions"
          git commit -am "Auto-update docs"
      - name: push changes to docs
        run: |
          git push -f --set-upstream origin docs

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_docs_update.yml@pre-release/v4-beta
    secrets: inherit
.github/workflows/dbt_integration_test.yml (vendored, modified)

@@ -3,38 +3,20 @@ run-name: ${{ github.event.inputs.branch }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        required: true
        type: string
        required: true

concurrency: ${{ github.workflow }}

jobs:
  prepare_vars:
    runs-on: ubuntu-latest
    environment:
      name: ${{ inputs.environment }}
    outputs:
      warehouse: ${{ steps.set_outputs.outputs.warehouse }}
    steps:
      - name: Set warehouse output
        id: set_outputs
        run: |
          echo "warehouse=${{ vars.WAREHOUSE }}" >> $GITHUB_OUTPUT
  called_workflow_template:
    needs: prepare_vars
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_integration_test.yml@pre-release/v4-beta
    with:
      command: >
      target: ${{ inputs.environment }}
      command: |
        dbt test --selector 'integration_tests'
      environment: ${{ inputs.environment }}
      warehouse: ${{ needs.prepare_vars.outputs.warehouse }}
    secrets: inherit

  notify-failure:
    needs: [called_workflow_template]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_abi_refresh.yml (vendored, deleted, -58)

@@ -1,58 +0,0 @@
name: dbt_run_abi_refresh
run-name: dbt_run_abi_refresh

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Update ABI Models
        run: |
          dbt run -m "avalanche_models,tag:abis"

      - name: Kick off decoded logs history, if there are new ABIs from users
        run: |
          dbt run-operation run_decoded_logs_history

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_adhoc.yml (vendored, modified)

@@ -1,12 +1,12 @@
name: dbt_run_adhoc
run-name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
      target:
        type: choice
        description: DBT Run Environment
        required: true

@@ -29,48 +29,15 @@ on:
        description: 'DBT Run Command'
        required: true

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      warehouse: ${{ inputs.warehouse }}
      target: ${{ inputs.target }}
      command_name: Run DBT Command
      command: ${{ inputs.dbt_command }}
    secrets: inherit
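
For illustration, the migrated adhoc workflow can be dispatched with its three inputs from the GitHub CLI; the selector inside dbt_command is a made-up example, and the warehouse input is assumed to keep the choices from the unchanged hunk above:

    $ gh workflow run dbt_run_adhoc.yml --ref main \
        -f target=dev \
        -f warehouse=DBT \
        -f dbt_command='dbt run -m "fsc_evm,tag:core,tag:gold"'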
.github/workflows/dbt_run_deployment.yml (vendored, deleted, -69)

@@ -1,69 +0,0 @@
name: dbt_run_deployment
run-name: dbt_run_deployment

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
        default: DBT
      dbt_command:
        type: string
        description: 'DBT Run Command'
        required: true

env:
  DBT_PROFILES_DIR: ./
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}
      - name: Run datashare model
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
          dbt run-operation run_query --args "{sql: call admin.datashare.sp_grant_share_permissions('${{ env.DATABASE }}')}"
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target
.github/workflows/dbt_run_dev_refresh.yml (vendored, modified)

@@ -3,82 +3,13 @@ run-name: dbt_run_dev_refresh

on:
  workflow_dispatch:
  schedule:
    # Runs "at 9:20 UTC on Monday" (see https://crontab.guru)
    - cron: '20 9 * * 1'

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs_refresh:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run Dev Refresh
        run: |
          dbt run-operation run_sp_create_prod_clone

  notify-failure:
    needs: [run_dbt_jobs_refresh]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

  run_dbt_jobs_udfs:
    runs-on: ubuntu-latest
    needs: run_dbt_jobs_refresh
    environment:
      name: workflow_dev

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run Recreate UDFs
        run: |
          dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
          dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev

  notify-failure2:
    needs: [run_dbt_jobs_udfs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_dev_refresh.yml@pre-release/v4-beta
    secrets: inherit
.github/workflows/dbt_run_full_observability.yml (vendored, modified)

@@ -3,52 +3,21 @@ run-name: dbt_run_full_observability

on:
  workflow_dispatch:
  schedule:
    # Runs "At 04:00 on day-of-month 1." (see https://crontab.guru)
    - cron: '0 4 1 * *'

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod_2xl

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"
      warehouse: DBT_EMERGENCY
      command_name: Run Observability Models
      command: |
        dbt run --threads 2 --vars '{"MAIN_OBSERV_FULL_TEST_ENABLED":True}' -m "fsc_evm,tag:observability"
    secrets: inherit

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "avalanche_models,tag:observability"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_heal_models.yml (vendored, modified)

@@ -3,52 +3,17 @@ run-name: dbt_run_heal_models

on:
  workflow_dispatch:
  schedule:
    # Runs at 04:25 on Wednesday (see https://crontab.guru)
    - cron: '25 4 * * 3'

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "avalanche_models,tag:heal" --vars '{"HEAL_MODEL":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      command_name: Run Heal Models
      command: |
        dbt run -m "$PROJECT_NAME,tag:heal" --vars '{"HEAL_MODEL":True}'
    secrets: inherit
.github/workflows/dbt_run_operation_reorg.yml (vendored, deleted, -61)

@@ -1,61 +0,0 @@
name: dbt_run_operation_reorg
run-name: dbt_run_operation_reorg

on:
  workflow_dispatch:
  schedule:
    # Runs at minute 30 on Monday at 00:00 UTC (see https://crontab.guru)
    - cron: '30 0 * * 1'

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: List reorg models
        id: list_models
        run: |
          reorg_model_list=$(dbt list --select "avalanche_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
          echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT

      - name: Execute block_reorg macro
        run: |
          dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '169'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_scheduled_abis.yml (vendored, new file, +22)

@@ -0,0 +1,22 @@
name: dbt_run_scheduled_abis
run-name: dbt_run_scheduled_abis

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run ABI Models
      command: |
        dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:gold,tag:abis"
      command_name_2: Kick off decoded logs history, if there are new user submitted ABIs
      command_2: |
        dbt run-operation fsc_evm.run_decoded_logs_history
    secrets: inherit
.github/workflows/dbt_run_scheduled_curated.yml (vendored, modified)

@@ -6,48 +6,14 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "avalanche_models,tag:curated"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      command_name: Run Curated Models
      command: |
        dbt run -m "$PROJECT_NAME,tag:curated" "fsc_evm,tag:curated"
    secrets: inherit
.github/workflows/dbt_run_scheduled_decoder.yml (vendored, new file, +19)

@@ -0,0 +1,19 @@
name: dbt_run_scheduled_decoder
run-name: dbt_run_scheduled_decoder

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Decoder Models
      command: |
        dbt run -m "fsc_evm,tag:bronze,tag:decoded_logs" "fsc_evm,tag:silver,tag:decoded_logs" "fsc_evm,tag:gold,tag:decoded_logs"
    secrets: inherit
.github/workflows/dbt_run_scheduled_main.yml (vendored, new file, +19)

@@ -0,0 +1,19 @@
name: dbt_run_scheduled_main
run-name: dbt_run_scheduled_main

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Main Models
      command: |
        dbt run -m "fsc_evm,tag:bronze,tag:core" "fsc_evm,tag:silver,tag:core" "fsc_evm,tag:gold,tag:core" "fsc_evm,tag:silver,tag:prices" "fsc_evm,tag:gold,tag:prices" "fsc_evm,tag:silver,tag:labels" "fsc_evm,tag:gold,tag:labels" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
    secrets: inherit
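
A note on the selector syntax in that long command: within one quoted argument, comma-separated criteria intersect (models in the fsc_evm package carrying all listed tags), while space-separated arguments union. A cheap way to preview what one such selector matches, with an assumed dev target:

    $ dbt ls -m "fsc_evm,tag:bronze,tag:core" -t dev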
(deleted file, -53)

@@ -1,53 +0,0 @@
name: dbt_run_scheduled_non_realtime
run-name: dbt_run_scheduled_non_realtime

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "avalanche_models,tag:non_realtime" "avalanche_models,tag:streamline_decoded_logs_complete" "avalanche_models,tag:streamline_decoded_logs_realtime"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_scheduled_scores.yml (vendored, new file, +20)

@@ -0,0 +1,20 @@

name: dbt_run_scheduled_scores
run-name: dbt_run_scheduled_scores

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Scores Models
      command: |
        dbt run -m "fsc_evm,tag:scores"
    secrets: inherit
(modified file)

@@ -6,52 +6,15 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "avalanche_models,tag:streamline_core_complete" "avalanche_models,tag:streamline_core_realtime" "avalanche_models,tag:streamline_core_complete_receipts" "avalanche_models,tag:streamline_core_realtime_receipts" "avalanche_models,tag:streamline_core_complete_confirm_blocks" "avalanche_models,tag:streamline_core_realtime_confirm_blocks"

      - name: Run Chainhead Tests
        run: |
          dbt test -m "avalanche_models,tag:chainhead"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      command_name: Run Chainhead Models
      command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:chainhead" "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash"
        dbt test -m "fsc_evm,tag:chainhead"
    secrets: inherit
(deleted file, -56)

@@ -1,56 +0,0 @@
name: dbt_run_streamline_decoded_logs_history
run-name: dbt_run_streamline_decoded_logs_history

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Update complete table
        run: |
          dbt run -m "avalanche_models,tag:streamline_decoded_logs_complete"

      - name: Decode historical logs
        run: |
          dbt run-operation decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_streamline_decoder.yml (vendored, deleted, -53)

@@ -1,53 +0,0 @@
name: dbt_run_streamline_decoder
run-name: dbt_run_streamline_decoder

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "avalanche_models,tag:decoded_logs"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_run_streamline_decoder_history.yml (vendored, new file, +22)

@@ -0,0 +1,22 @@
name: dbt_run_streamline_decoder_history
run-name: dbt_run_streamline_decoder_history

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Decoder Complete
      command: |
        dbt run -m "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete"
      command_name_2: Run Streamline Decoder History
      command_2: |
        dbt run-operation fsc_evm.decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
    secrets: inherit
(modified file)

@@ -5,6 +5,8 @@ on:
  workflow_dispatch:
    branches:
      - "main"
  schedule:
    - cron: '50 * * * *'

env:
  DBT_PROFILES_DIR: ./
(modified file)

@@ -4,7 +4,6 @@ run-name: dbt_run_streamline_dexalot_history
on:
  workflow_dispatch:
  schedule:
    # Runs at minute 37 past every hour
    - cron: '37 * * * *'

env:
(modified file)

@@ -5,6 +5,8 @@ on:
  workflow_dispatch:
    branches:
      - "main"
  schedule:
    - cron: '5 * * * *'

env:
  DBT_PROFILES_DIR: ./
.github/workflows/dbt_run_streamline_history.yml (vendored, new file, +19)

@@ -0,0 +1,19 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run History Models
      command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:history" --exclude "fsc_evm,tag:receipts_by_hash"
    secrets: inherit
(deleted file, -79)

@@ -1,79 +0,0 @@
name: dbt_run_streamline_history_adhoc
run-name: dbt_run_streamline_history_adhoc

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        type: choice
        description: DBT Run Environment
        required: true
        options:
          - dev
          - prod
          - prod_backfill
        default: dev
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
        default: DBT
      dbt_command:
        type: choice
        description: 'DBT Run Command'
        required: true
        options:
          - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "avalanche_models,tag:streamline_core_complete" "avalanche_models,tag:streamline_core_history" "avalanche_models,tag:streamline_core_complete_receipts" "avalanche_models,tag:streamline_core_history_receipts" "avalanche_models,tag:streamline_core_complete_confirm_blocks" "avalanche_models,tag:streamline_core_history_confirm_blocks"
          - dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "avalanche_models,tag:streamline_decoded_logs_complete" "avalanche_models,tag:streamline_decoded_logs_history"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/dbt_test_daily.yml (vendored, modified)

@@ -3,53 +3,21 @@ run-name: dbt_test_daily

on:
  workflow_dispatch:
  schedule:
    # Runs "at 9:00 UTC" (see https://crontab.guru)
    - cron: '0 9 * * *'

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt test --exclude "avalanche_models,tag:full_test" "avalanche_models,tag:recent_test" "avalanche_models,tag:gha_tasks" livequery_models

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      target: test
      command_name: Build Daily Testing Views
      command: |
        dbt run -m "fsc_evm,tag:daily_test"
      command_name_2: Run Daily Tests (all tests excluding full, recent and misc. others)
      command_2: |
        dbt test --exclude "fsc_evm,tag:full_test" "fsc_evm,tag:recent_test" "fsc_evm,tag:gha_tasks" livequery_models
    secrets: inherit
.github/workflows/dbt_test_intraday.yml (vendored, modified)

@@ -6,49 +6,16 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "avalanche_models,tag:observability"
          dbt test -m "avalanche_models,tag:recent_test" --exclude models/dexalot

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      target: test
      command_name: Run Observability & Recent Tests
      command: |
        dbt run -m "fsc_evm,tag:observability"
        dbt test -m "fsc_evm,tag:recent_test" --exclude models/dexalot
    secrets: inherit
.github/workflows/dbt_test_monthly.yml (vendored, modified)

@@ -3,52 +3,21 @@ run-name: dbt_test_monthly

on:
  workflow_dispatch:
  schedule:
    # Runs "at 4:00 UTC on 28th of the month" (see https://crontab.guru)
    - cron: '0 4 28 * *'

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt test -m "avalanche_models,tag:full_test"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      warehouse: DBT_EMERGENCY
      command_name: Build Full Testing Views
      command: |
        dbt run -m "fsc_evm,tag:full_test"
      command_name_2: Run Full Tests
      command_2: |
        dbt test -m "fsc_evm,tag:full_test"
    secrets: inherit
.github/workflows/slack_notify.yml (vendored, deleted, -27)

@@ -1,27 +0,0 @@
name: Slack Notification
on:
  workflow_call:
    secrets:
      SLACK_WEBHOOK_URL:
        required: true

jobs:
  notify:
    runs-on: ubuntu-latest
    environment: workflow_prod
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Install dependencies
        run: pip install requests

      - name: Send Slack notification
        run: python python/slack_alert.py
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.gitignore (vendored, modified)

@@ -18,4 +18,4 @@ logs/
dbt-env/
.user.yml

package-lock.yml
/package-lock.yml

The ignore pattern is now anchored to the repository root: package-lock.yml matched the file at any depth, while /package-lock.yml matches it only at the top level.
Makefile (modified)

@@ -1,6 +1,109 @@
SHELL := /bin/bash
DBT_TARGET ?= dev
RECEIPTS_BY_HASH_ENABLED ?= false

dbt-console:
    docker-compose run dbt_console
cleanup_time:
    @set -e; \
    rm -f package-lock.yml && dbt clean && dbt deps

.PHONY: dbt-console
deploy_gha_workflows_table:
    @set -e; \
    echo "Collecting workflow names..." ; \
    WORKFLOW_VALUES="" ; \
    for file in $$(find .github/workflows -name "*.yml" -type f); do \
        filename=$$(basename "$$file" .yml) ; \
        if [ -z "$$WORKFLOW_VALUES" ]; then \
            WORKFLOW_VALUES="('$$filename')" ; \
        else \
            WORKFLOW_VALUES="$$WORKFLOW_VALUES,('$$filename')" ; \
        fi ; \
    done ; \
    echo "Found workflows: $$WORKFLOW_VALUES" ; \
    dbt run-operation create_workflow_table --args "{\"workflow_values\": \"$$WORKFLOW_VALUES\"}" -t $(DBT_TARGET)

deploy_gha_tasks:
    @set -e; \
    make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
    dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET); \
    dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)

deploy_new_gha_tasks:
    @set -e; \
    make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
    dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)

deploy_livequery:
    @set -e; \
    dbt run-operation fsc_evm.drop_livequery_schemas --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
    dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)

deploy_chain_phase_1:
    @set -e; \
    dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
    dbt run-operation fsc_evm.call_sample_rpc_node -t $(DBT_TARGET); \
    if [ "$(DBT_TARGET)" != "prod" ]; then \
        if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
            dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
            dbt test -m "fsc_evm,tag:chainhead"; \
            dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
        else \
            dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
            dbt test -m "fsc_evm,tag:chainhead"; \
            dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
        fi; \
    else \
        if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
            dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
            dbt test -m "fsc_evm,tag:chainhead"; \
            dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
        else \
            dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
            dbt test -m "fsc_evm,tag:chainhead"; \
            dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
        fi; \
    fi; \
    echo "# wait ~10 minutes"; \
    echo "# run deploy_chain_phase_2"

deploy_chain_phase_2:
    @set -e; \
    if [ "$(DBT_TARGET)" != "prod" ]; then \
        dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
    else \
        dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
    fi; \
    echo "# wait ~10 minutes"; \
    echo "# run deploy_chain_phase_3"

deploy_chain_phase_3:
    @set -e; \
    if [ "$(DBT_TARGET)" != "prod" ]; then \
        dbt run -m "fsc_evm,tag:phase_2" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
    else \
        dbt run -m "fsc_evm,tag:phase_2" -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
    fi; \
    echo "# wait ~10 minutes"; \
    echo "# run deploy_chain_phase_4"

deploy_chain_phase_4:
    @set -e; \
    if [ "$(DBT_TARGET)" != "prod" ]; then \
        dbt run -m "fsc_evm,tag:phase_3" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
    else \
        dbt run -m "fsc_evm,tag:phase_3" -t $(DBT_TARGET); \
        dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
        make deploy_gha_tasks DBT_TARGET=$(DBT_TARGET); \
    fi; \

.PHONY: cleanup_time deploy_gha_workflows_table deploy_gha_tasks deploy_new_gha_tasks deploy_livequery deploy_chain_phase_1 deploy_chain_phase_2 deploy_chain_phase_3 deploy_chain_phase_4
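
Because DBT_TARGET and RECEIPTS_BY_HASH_ENABLED are declared with ?= defaults, each phase target can be parameterized per invocation. A sketch of a dev rollout, following the pauses the recipes echo:

    $ make deploy_chain_phase_1 DBT_TARGET=dev RECEIPTS_BY_HASH_ENABLED=false
    # wait ~10 minutes
    $ make deploy_chain_phase_2 DBT_TARGET=dev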
(deleted file, -10)

@@ -1,10 +0,0 @@
workflow_name,workflow_schedule
dbt_run_scheduled_non_realtime,"17,47 * * * *"
dbt_run_streamline_chainhead,"10,40 * * * *"
dbt_run_streamline_decoder,"25,55 * * * *"
dbt_run_scheduled_curated,"35 * * * *"
dbt_test_intraday,"37 */4 * * *"
dbt_run_streamline_dexalot_chainhead,"50 * * * *"
dbt_run_streamline_dexalot_non_realtime,"5 * * * *"
dbt_run_streamline_decoded_logs_history,"19 19 * * 6"
dbt_run_abi_refresh,"10 23 * * *"
109
dbt_project.yml
109
dbt_project.yml
@ -17,6 +17,8 @@ test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
docs-paths:
["dbt_packages/fsc_evm/doc_descriptions", "models/doc_descriptions", "models"]

target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
@ -24,13 +26,6 @@ clean-targets: # directories to be removed by `dbt clean`
- "dbt_modules"
- "dbt_packages"

models:
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"

tests:
+store_failures: true # all tests

@ -55,6 +50,61 @@ query-comment:
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models

models:
avalanche_models: # replace with the name of the chain
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
livequery_models:
+materialized: ephemeral
fsc_evm:
+enabled: false # disable fsc_evm package by default
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
main_package:
+enabled: false # disable top level package by default, enabled subpackages as needed
admin:
+enabled: true
core:
+enabled: true # enable subpackages, as needed
bronze:
+enabled: false
token_reads:
+enabled: true
github_actions:
+enabled: true
labels:
+enabled: true
observability:
+enabled: true
prices:
+enabled: true
utils:
+enabled: true
decoder_package:
+enabled: false
abis:
+enabled: true
decoded_logs:
+enabled: false
gold:
+enabled: true
silver:
+enabled: true
streamline:
+enabled: true
curated_package:
+enabled: false
stats:
+enabled: true
scores_package:
+enabled: false

# In this example config, we tell dbt to build all models in the example/ directory
# as tables. These settings can be overridden in the individual model files
# using the `{{ config(...) }}` macro.
@ -65,13 +115,9 @@ vars:
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
UPDATE_UDFS_AND_SPS: False
UPDATE_SNOWFLAKE_TAGS: True
LIMIT_PARTITIONS: True
WAIT: 0
OBSERV_FULL_TEST: False
HEAL_MODEL: False
HEAL_MODELS: []
START_GHA_TASKS: False
RELOAD_TRACES: False

#### STREAMLINE 2.0 BEGIN ####

@ -97,44 +143,3 @@ vars:
- DBT_CLOUD_AVALANCHE

#### STREAMLINE 2.0 END ####

#### FSC_EVM BEGIN ####
# Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables

### GLOBAL VARIABLES BEGIN ###
## REQUIRED
GLOBAL_PROD_DB_NAME: 'avalanche'
GLOBAL_NODE_SECRET_PATH: 'Vault/prod/avalanche/c_chain/quicknode/mainnet'
GLOBAL_BLOCKS_PER_HOUR: 1800
GLOBAL_USES_STREAMLINE_V1: True
GLOBAL_USES_SINGLE_FLIGHT_METHOD: True

### GLOBAL VARIABLES END ###

### MAIN_PACKAGE VARIABLES BEGIN ###

### CORE ###
## REQUIRED

## OPTIONAL

# GOLD_FULL_REFRESH: True
# SILVER_FULL_REFRESH: True

# BLOCKS_COMPLETE_FULL_REFRESH: True
# CONFIRM_BLOCKS_COMPLETE_FULL_REFRESH: True
# RECEIPTS_COMPLETE_FULL_REFRESH: True
# TRACES_COMPLETE_FULL_REFRESH: True
# TRANSACTIONS_COMPLETE_FULL_REFRESH: True

### MAIN_PACKAGE VARIABLES END ###

### DECODER_PACKAGE VARIABLES BEGIN ###

## REQUIRED

## OPTIONAL

### DECODER_PACKAGE VARIABLES END ###

#### FSC_EVM END ####
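For context on how these project variables get consumed (an illustrative sketch, not a model from this commit): dbt's var() reads them inside any model or macro, so under the config above a hypothetical model body such as

SELECT {{ var('GLOBAL_BLOCKS_PER_HOUR', 0) }} AS blocks_per_hour

would compile to SELECT 1800 AS blocks_per_hour.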
@ -1,17 +1,7 @@
{% macro create_udfs() %}
{% if var("UPDATE_UDFS_AND_SPS") %}
{% if var("UPDATE_UDFS_AND_SPS", false) %}
{% set sql %}
CREATE schema if NOT EXISTS silver;
{{ create_udtf_get_base_table(
schema = "streamline"
) }}
{{ create_udf_get_chainhead() }}
{{ create_udf_bulk_json_rpc() }}
{{ create_udf_bulk_get_traces() }}
{{ create_udf_decode_array_string() }}
{{ create_udf_decode_array_object() }}
{{ create_udf_bulk_decode_logs() }}

{% endset %}
{% do run_query(sql) %}
{{- fsc_utils.create_udfs() -}}
@ -1,22 +0,0 @@
{% macro standard_predicate(
input_column = 'block_number'
) -%}
{%- set database_name = target.database -%}
{%- set schema_name = generate_schema_name(
node = model
) -%}
{%- set table_name = generate_alias_name(
node = model
) -%}
{%- set tmp_table_name = table_name ~ '__dbt_tmp' -%}
{%- set full_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ table_name -%}
{%- set full_tmp_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ tmp_table_name -%}
{{ full_table_name }}.{{ input_column }} >= (
SELECT
MIN(
{{ input_column }}
)
FROM
{{ full_tmp_table_name }}
)
{%- endmacro %}
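For reference, when standard_predicate() was used as an incremental predicate, the rendered SQL looked like the following (database, schema, and table names are hypothetical):

analytics.silver.my_table.block_number >= (
    SELECT MIN(block_number) FROM analytics.silver.my_table__dbt_tmp
)

That is, the merge only scans target rows at or above the smallest block number in the incoming temp table.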
@ -1,124 +0,0 @@
{% macro decoded_logs_history(backfill_mode=false) %}

{%- set params = {
"sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 8000000),
"producer_batch_size": var("DECODED_LOGS_HISTORY_PRODUCER_BATCH_SIZE", 400000),
"worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000)
} -%}

{% set wait_time = var("DECODED_LOGS_HISTORY_WAIT_TIME", 60) %}
{% set find_months_query %}
SELECT
DISTINCT date_trunc('month', block_timestamp)::date as month
FROM {{ ref('core__fact_blocks') }}
ORDER BY month ASC
{% endset %}
{% set results = run_query(find_months_query) %}

{% if execute %}
{% set months = results.columns[0].values() %}

{% for month in months %}
{% set view_name = 'decoded_logs_history_' ~ month.strftime('%Y_%m') %}

{% set create_view_query %}
create or replace view streamline.{{view_name}} as (
WITH target_blocks AS (
SELECT
block_number
FROM {{ ref('core__fact_blocks') }}
WHERE date_trunc('month', block_timestamp) = '{{month}}'::timestamp
),
new_abis AS (
SELECT
abi,
parent_contract_address,
event_signature,
start_block,
end_block
FROM {{ ref('silver__complete_event_abis') }}
{% if not backfill_mode %}
WHERE inserted_timestamp > dateadd('day', -30, sysdate())
{% endif %}
),
existing_logs_to_exclude AS (
SELECT _log_id
FROM {{ ref('streamline__decoded_logs_complete') }} l
INNER JOIN target_blocks b using (block_number)
),
candidate_logs AS (
SELECT
l.block_number,
l.tx_hash,
l.event_index,
l.contract_address,
l.topics,
l.data,
concat(l.tx_hash::string, '-', l.event_index::string) as _log_id
FROM target_blocks b
INNER JOIN {{ ref('core__fact_event_logs') }} l using (block_number)
WHERE l.tx_status = 'SUCCESS' and date_trunc('month', l.block_timestamp) = '{{month}}'::timestamp
)
SELECT
l.block_number,
l._log_id,
A.abi,
OBJECT_CONSTRUCT(
'topics', l.topics,
'data', l.data,
'address', l.contract_address
) AS data
FROM candidate_logs l
INNER JOIN new_abis A
ON A.parent_contract_address = l.contract_address
AND A.event_signature = l.topics[0]::STRING
AND l.block_number BETWEEN A.start_block AND A.end_block
WHERE NOT EXISTS (
SELECT 1
FROM existing_logs_to_exclude e
WHERE e._log_id = l._log_id
)
LIMIT {{ params.sql_limit }}
)
{% endset %}
{# Create the view #}
{% do run_query(create_view_query) %}
{{ log("Created view for month " ~ month.strftime('%Y-%m'), info=True) }}
{% if var("STREAMLINE_INVOKE_STREAMS", false) %}
{# Check if rows exist first #}
{% set check_rows_query %}
SELECT EXISTS(SELECT 1 FROM streamline.{{view_name}} LIMIT 1)
{% endset %}

{% set results = run_query(check_rows_query) %}
{% set has_rows = results.columns[0].values()[0] %}

{% if has_rows %}
{# Invoke streamline, if rows exist to decode #}
{% set decode_query %}
SELECT
streamline.udf_bulk_decode_logs_v2(
PARSE_JSON(
$${ "external_table": "decoded_logs",
"producer_batch_size": {{ params.producer_batch_size }},
"sql_limit": {{ params.sql_limit }},
"sql_source": "{{view_name}}",
"worker_batch_size": {{ params.worker_batch_size }} }$$
)
);
{% endset %}

{% do run_query(decode_query) %}
{{ log("Triggered decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{# Call wait since we actually did some decoding #}
{% do run_query("call system$wait(" ~ wait_time ~ ")") %}
{{ log("Completed wait after decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{% else %}
{{ log("No rows to decode for month " ~ month.strftime('%Y-%m'), info=True) }}
{% endif %}
{% endif %}

{% endfor %}
{% endif %}

{% endmacro %}
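A quick way to sanity-check one of the monthly views this macro created (month suffix hypothetical), since the names always follow the decoded_logs_history_YYYY_MM pattern set above:

SELECT COUNT(*) FROM streamline.decoded_logs_history_2024_01;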
@ -1,29 +0,0 @@
{% macro run_decoded_logs_history() %}

{% set blockchain = var('GLOBAL_PROD_DB_NAME','').lower() %}

{% set check_for_new_user_abis_query %}
select 1
from {{ ref('silver__user_verified_abis') }}
where _inserted_timestamp::date = sysdate()::date
and dayname(sysdate()) <> 'Sat'
{% endset %}
{% set results = run_query(check_for_new_user_abis_query) %}
{% if execute %}
{% set new_user_abis = results.columns[0].values()[0] %}

{% if new_user_abis %}
{% set invoke_workflow_query %}
SELECT
github_actions.workflow_dispatches(
'FlipsideCrypto',
'{{ blockchain }}' || '-models',
'dbt_run_streamline_decoded_logs_history.yml',
NULL
)
{% endset %}

{% do run_query(invoke_workflow_query) %}
{% endif %}
{% endif %}
{% endmacro %}
@ -1,226 +0,0 @@
{% macro silver_traces_v1(
full_reload_start_block,
full_reload_blocks,
full_reload_mode = false,
TRACES_ARB_MODE = false,
TRACES_SEI_MODE = false,
TRACES_KAIA_MODE = false,
use_partition_key = false,
schema_name = 'bronze'
) %}
WITH bronze_traces AS (
SELECT
block_number,
{% if use_partition_key %}
partition_key,
{% else %}
_partition_by_block_id AS partition_key,
{% endif %}

VALUE :array_index :: INT AS tx_position,
DATA :result AS full_traces,
{% if TRACES_SEI_MODE %}
DATA :txHash :: STRING AS tx_hash,
{% endif %}
_inserted_timestamp
FROM

{% if is_incremental() and not full_reload_mode %}
{{ ref(
schema_name ~ '__traces'
) }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
AND DATA :result IS NOT NULL {% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}

{% elif is_incremental() and full_reload_mode %}
{{ ref(
schema_name ~ '__traces_fr'
) }}
WHERE
{% if use_partition_key %}
partition_key BETWEEN (
SELECT
MAX(partition_key) - 100000
FROM
{{ this }}
)
AND (
SELECT
MAX(partition_key) + {{ full_reload_blocks }}
FROM
{{ this }}
)
{% else %}
_partition_by_block_id BETWEEN (
SELECT
MAX(_partition_by_block_id) - 100000
FROM
{{ this }}
)
AND (
SELECT
MAX(_partition_by_block_id) + {{ full_reload_blocks }}
FROM
{{ this }}
)
{% endif %}

{% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% else %}
{{ ref(
schema_name ~ '__traces_fr'
) }}
WHERE
{% if use_partition_key %}
partition_key <= {{ full_reload_start_block }}
{% else %}
_partition_by_block_id <= {{ full_reload_start_block }}
{% endif %}

{% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% endif %}

qualify(ROW_NUMBER() over (PARTITION BY block_number, tx_position
ORDER BY
_inserted_timestamp DESC)) = 1
),
flatten_traces AS (
SELECT
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
partition_key,
IFF(
path IN (
'result',
'result.value',
'result.type',
'result.to',
'result.input',
'result.gasUsed',
'result.gas',
'result.from',
'result.output',
'result.error',
'result.revertReason',
'result.time',
'gasUsed',
'gas',
'type',
'to',
'from',
'value',
'input',
'error',
'output',
'time',
'revertReason'
{% if TRACES_ARB_MODE %},
'afterEVMTransfers',
'beforeEVMTransfers',
'result.afterEVMTransfers',
'result.beforeEVMTransfers'
{% endif %}
{% if TRACES_KAIA_MODE %},
'reverted',
'result.reverted'
{% endif %}
),
'ORIGIN',
REGEXP_REPLACE(REGEXP_REPLACE(path, '[^0-9]+', '_'), '^_|_$', '')
) AS trace_address,
_inserted_timestamp,
OBJECT_AGG(
key,
VALUE
) AS trace_json,
CASE
WHEN trace_address = 'ORIGIN' THEN NULL
WHEN POSITION(
'_' IN trace_address
) = 0 THEN 'ORIGIN'
ELSE REGEXP_REPLACE(
trace_address,
'_[0-9]+$',
'',
1,
1
)
END AS parent_trace_address,
SPLIT(
trace_address,
'_'
) AS trace_address_array
FROM
bronze_traces txs,
TABLE(
FLATTEN(
input => PARSE_JSON(
txs.full_traces
),
recursive => TRUE
)
) f
WHERE
f.index IS NULL
AND f.key != 'calls'
AND f.path != 'result'
{% if TRACES_ARB_MODE %}
AND f.path NOT LIKE 'afterEVMTransfers[%'
AND f.path NOT LIKE 'beforeEVMTransfers[%'
{% endif %}
{% if TRACES_KAIA_MODE %}
and f.key not in ('message', 'contract')
{% endif %}
GROUP BY
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
partition_key,
trace_address,
_inserted_timestamp
)
SELECT
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
trace_address,
parent_trace_address,
trace_address_array,
trace_json,
partition_key,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['block_number'] +
(['tx_hash'] if TRACES_SEI_MODE else ['tx_position']) +
['trace_address']
) }} AS traces_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
flatten_traces qualify(ROW_NUMBER() over(PARTITION BY traces_id
ORDER BY
_inserted_timestamp DESC)) = 1
{% endmacro %}
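The trace_address derivation above is a digit-extraction rewrite of Snowflake's FLATTEN path. A standalone check, runnable as-is in Snowflake:

SELECT REGEXP_REPLACE(REGEXP_REPLACE('calls[0].calls[1].gasUsed', '[^0-9]+', '_'), '^_|_$', '');  -- returns '0_1'

So the second sub-call of the first call gets trace_address '0_1', and stripping the trailing '_[0-9]+$' yields its parent_trace_address '0'.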
@ -1,101 +0,0 @@
{% macro streamline_external_table_query_decoder(
source_name,
source_version
) %}

{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}

WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP())
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}


{% macro streamline_external_table_query_decoder_fr(
source_name,
source_version
) %}

{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}

WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
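The meta CTE's partition columns come straight from the external file path. Worked on a hypothetical file name of the expected shape (runnable as-is in Snowflake):

WITH f AS (SELECT 'bucket/decoded_logs/2024/01/15/123456_0.json' AS file_name)
SELECT
    CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,  -- 123456
    TO_DATE(CONCAT_WS('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))) AS _partition_by_created_date  -- 2024-01-15
FROM f;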
@ -1,141 +0,0 @@
{% macro streamline_external_table_query(
source_name,
source_version,
partition_function,
balances,
block_number,
uses_receipts_by_hash
) %}

{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}

WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp

{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}

{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key

{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}

{% macro streamline_external_table_query_fr(
source_name,
source_version,
partition_function,
partition_join_key,
balances,
block_number,
uses_receipts_by_hash
) %}

{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}

WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp

{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}

{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.{{ partition_join_key }}

{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.{{ partition_join_key }}
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
@ -1,36 +0,0 @@
{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %}

{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
{% if model_type != '' %}
{% set model_type = '_' ~ model_type %}
{% endif %}

{%- if flags.WHICH == 'compile' and execute -%}

{{ log("=== Current Variable Settings ===", info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }}
{{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }}
{% if uses_receipts_by_hash %}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}

{{ log("", info=True) }}
{{ log("=== Source Details ===", info=True) }}
{{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }}
{{ log("", info=True) }}

{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}

{%- endif -%}

{% endmacro %}
@ -1,29 +0,0 @@
{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %}

{%- if flags.WHICH == 'compile' and execute -%}

{% if uses_receipts_by_hash %}

{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}

{% endif %}

{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' unique_key = "' ~ config.get('unique_key') ~ '",\n' %}
{% set config_log = config_log ~ ' cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %}
{% set config_log = config_log ~ ' merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' post_hook = "' ~ post_hook ~ '",\n' %}
{% set config_log = config_log ~ ' incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' full_refresh = ' ~ full_refresh_type ~ ',\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}

{%- endif -%}

{% endmacro %}
@ -1,36 +0,0 @@
{% macro log_model_details(vars=false, params=false) %}

{%- if execute -%}
/*
DBT Model Config:
{{ model.config | tojson(indent=2) }}
*/

{% if vars is not false %}

{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( vars | tojson(indent=2), info=True) }}
{% endif %}
/*
Variables:
{{ vars | tojson(indent=2) }}
*/
{% endif %}

{% if params is not false %}

{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( params | tojson(indent=2), info=True) }}
{% endif %}
/*
Parameters:
{{ params | tojson(indent=2) }}
*/
{% endif %}

/*
Raw Code:
{{ model.raw_code }}
*/
{%- endif -%}
{% endmacro %}
@ -1,55 +0,0 @@
{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %}

{%- if flags.WHICH == 'compile' and execute -%}

{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("START_UP_BLOCK: " ~ min_block, info=True) }}
{{ log("", info=True) }}

{{ log("=== API Details ===", info=True) }}

{{ log("NODE_URL: " ~ node_url, info=True) }}
{{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
{{ log("", info=True) }}

{{ log("=== Current Variable Settings ===", info=True) }}

{{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }}
{{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }}
{{ log("", info=True) }}

{{ log("=== RPC Details ===", info=True) }}

{{ log(model_name ~ ": {", info=True) }}
{{ log(" method: '" ~ method ~ "',", info=True) }}
{{ log(" method_params: " ~ method_params, info=True) }}
{{ log("}", info=True) }}
{{ log("", info=True) }}

{% set params_str = streamline_params | tojson %}
{% set params_formatted = params_str | replace('{', '{\n ') | replace('}', '\n }') | replace(', ', ',\n ') %}

{# Clean up the method_params formatting #}
{% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %}

{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' post_hook = fsc_utils.if_data_call_function_v2(\n' %}
{% set config_log = config_log ~ ' func = "streamline.udf_bulk_rest_api_v2",\n' %}
{% set config_log = config_log ~ ' target = "' ~ this.schema ~ '.' ~ this.identifier ~ '",\n' %}
{% set config_log = config_log ~ ' params = ' ~ params_formatted ~ '\n' %}
{% set config_log = config_log ~ ' ),\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}

{%- endif -%}

{% endmacro %}
@ -1,47 +0,0 @@
{% macro set_default_variables_streamline(model_name, model_type) %}

{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%}
{%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%}
{%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%}
{%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%}
{%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime'
else 'ORDER BY partition_key ASC, block_number ASC' -%}
{%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}

{%- set variables = {
'node_url': node_url,
'node_secret_path': node_secret_path,
'model_quantum_state': model_quantum_state,
'testing_limit': testing_limit,
'new_build': new_build,
'order_by_clause': order_by_clause,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}

{{ return(variables) }}

{% endmacro %}

{% macro set_default_variables_bronze(source_name, model_type) %}

{%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION',
"CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)")
-%}
{%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%}
{%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%}
{%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}

{%- set variables = {
'partition_function': partition_function,
'partition_join_key': partition_join_key,
'block_number': block_number,
'balances': balances,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}

{{ return(variables) }}

{% endmacro %}
@ -1,57 +0,0 @@
{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %}

{%- set rpc_config_details = {
"blocks_transactions": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)',
"exploded_key": ['result', 'result.transactions']
},
"receipts_by_hash": {
"method": 'eth_getTransactionReceipt',
"method_params": 'ARRAY_CONSTRUCT(tx_hash)'
},
"receipts": {
"method": 'eth_getBlockReceipts',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))',
"exploded_key": ['result'],
"lambdas": 2

},
"traces": {
"method": 'debug_traceBlockByNumber',
"method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))",
"exploded_key": ['result'],
"lambdas": 2
},
"confirm_blocks": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)'
}
} -%}

{%- set rpc_config = rpc_config_details[model_name.lower()] -%}

{%- set params = {
"external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()),
"sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"worker_batch_size": var(
(model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(),
(2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1))
),
"sql_source": (model_name ~ '_' ~ model_type).lower(),
"method": rpc_config['method'],
"method_params": rpc_config['method_params']
} -%}

{%- if rpc_config.get('exploded_key') is not none -%}
{%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%}
{%- endif -%}

{%- if rpc_config.get('lambdas') is not none -%}
{%- do params.update({"lambdas": rpc_config['lambdas']}) -%}
{%- endif -%}

{{ return(params) }}

{% endmacro %}
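Worked defaults for the parameter math above, assuming GLOBAL_BLOCKS_PER_HOUR = 1800 and multiplier = 1: sql_limit and producer_batch_size both resolve to 2 * 1800 = 3600, and worker_batch_size is that figure floor-divided by the lambdas count, so 3600 for blocks_transactions (no lambdas key, divisor defaults to 1) and 1800 for traces or receipts (lambdas = 2).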
@ -1,12 +0,0 @@
{% macro lookback() %}
{% if execute and is_incremental() %}
{% set query %}
SELECT
MAX(_inserted_timestamp) :: DATE - 3
FROM
{{ this }};
{% endset %}
{% set last_week = run_query(query).columns [0] [0] %}
{% do return(last_week) %}
{% endif %}
{% endmacro %}
@ -1,11 +0,0 @@
{% macro create_aws_avalanche_api() %}
{% if target.name == "prod" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_avalanche_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-avalanche' api_allowed_prefixes = (
'https://87yvk5d2sf.execute-api.us-east-1.amazonaws.com/prod/',
'https://28hv9m0ra8.execute-api.us-east-1.amazonaws.com/dev/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}
@ -1,24 +0,0 @@
{% macro create_udtf_get_base_table(schema) %}
create or replace function {{ schema }}.udtf_get_base_table(max_height integer)
returns table (height number)
as
$$
with base as (
select
row_number() over (
order by
seq4()
) as id
from
table(generator(rowcount => 100000000))
)
select
id as height
from
base
where
id <= max_height
$$
;

{% endmacro %}
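This UDTF simply enumerates 1..max_height from a generator. An illustrative call, given that the repo registers it under the streamline schema:

SELECT height FROM TABLE(streamline.udtf_get_base_table(3));  -- returns heights 1, 2, 3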
@ -1,66 +0,0 @@
{% macro create_udf_get_chainhead() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead() returns variant api_integration = aws_avalanche_api AS {% if target.name == "prod" %}
'https://87yvk5d2sf.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
{% else %}
'https://28hv9m0ra8.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
{%- endif %};
{% endmacro %}

{% macro create_udf_bulk_json_rpc() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(
json variant
) returns text api_integration = aws_avalanche_api AS {% if target.name == "prod" %}
'https://87yvk5d2sf.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_json_rpc'
{% else %}
'https://28hv9m0ra8.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_json_rpc'
{%- endif %};
{% endmacro %}

{% macro create_udf_bulk_get_traces() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_get_traces(
json variant
) returns text api_integration = aws_avalanche_api AS {% if target.name == "prod" %}
'https://87yvk5d2sf.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_get_traces'
{% else %}
'https://28hv9m0ra8.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_get_traces'
{%- endif %};
{% endmacro %}

{% macro create_udf_decode_array_string() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
abi ARRAY,
DATA STRING
) returns ARRAY api_integration = aws_avalanche_api AS {% if target.name == "prod" %}
'https://87yvk5d2sf.execute-api.us-east-1.amazonaws.com/prod/decode_function'
{% else %}
'https://28hv9m0ra8.execute-api.us-east-1.amazonaws.com/dev/decode_function'
{%- endif %};
{% endmacro %}


{% macro create_udf_decode_array_object() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
abi ARRAY,
DATA OBJECT
) returns ARRAY api_integration = aws_avalanche_api AS {% if target.name == "prod" %}
'https://87yvk5d2sf.execute-api.us-east-1.amazonaws.com/prod/decode_log'
{% else %}
'https://28hv9m0ra8.execute-api.us-east-1.amazonaws.com/dev/decode_log'
{%- endif %};
{% endmacro %}

{% macro create_udf_bulk_decode_logs() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_decode_logs(
json OBJECT
) returns ARRAY api_integration = aws_avalanche_api AS {% if target.name == "prod" %}
'https://87yvk5d2sf.execute-api.us-east-1.amazonaws.com/prod/bulk_decode_logs'
{% else %}
'https://28hv9m0ra8.execute-api.us-east-1.amazonaws.com/dev/bulk_decode_logs'
{%- endif %};
{% endmacro %}
@ -1,25 +0,0 @@
{% test missing_decoded_logs(model) %}
SELECT
l.block_number,
CONCAT(
l.tx_hash,
'-',
l.event_index
) AS _log_id
FROM
{{ ref('core__fact_event_logs') }}
l
LEFT JOIN {{ model }}
d
ON l.block_number = d.block_number
AND CONCAT(
l.tx_hash,
'-',
l.event_index
) = d._log_id
WHERE
l.contract_address = LOWER('0xB31f66AA3C1e785363F0875A1B74E27b85FD66c7') -- WAVAX
AND l.topics [0] :: STRING = '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' -- Transfer
AND l.block_timestamp BETWEEN DATEADD('hour', -48, SYSDATE())
AND DATEADD('hour', -6, SYSDATE())
AND d._log_id IS NULL {% endtest %}
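For reference, the hard-coded topic filter above is the standard ERC-20 Transfer event signature hash, keccak256('Transfer(address,address,uint256)') = 0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef, so this test only asserts decoding coverage for WAVAX Transfer events in the 48-hour-to-6-hour window.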
@ -1,254 +0,0 @@
{% macro missing_txs(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_full') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}

{% macro recent_missing_txs(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
),
FINAL AS (
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
),
errors AS (
SELECT
COUNT(*) > 9 AS threshold
FROM
FINAL
)
SELECT
*
FROM
FINAL
WHERE
(
SELECT
threshold
FROM
errors
)
{% endmacro %}

{% macro missing_receipts(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_full') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}

{% macro recent_missing_receipts(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}

{% macro missing_traces(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_full') }}
WHERE
from_address <> '0x0000000000000000000000000000000000000000'
AND to_address <> '0x0000000000000000000000000000000000000000'
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}

{% macro recent_missing_traces(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
WHERE
from_address <> '0x0000000000000000000000000000000000000000'
AND to_address <> '0x0000000000000000000000000000000000000000'
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}

{% macro missing_confirmed_txs(
model1,
model2
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
block_hash AS base_block_hash,
tx_hash AS base_tx_hash
FROM
{{ model1 }}
),
model_name AS (
SELECT
block_number AS model_block_number,
block_hash AS model_block_hash,
tx_hash AS model_tx_hash
FROM
{{ model2 }}
)
SELECT
DISTINCT base_block_number AS block_number
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
AND base_block_hash = model_block_hash
WHERE
model_tx_hash IS NULL
AND model_block_number <= (
SELECT
MAX(base_block_number)
FROM
txs_base
)
{% endmacro %}
@ -1,78 +0,0 @@
{% macro if_data_call_function(
func,
target
) %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: Calling udf " ~ func ~ " on " ~ target,
True
) }}
{% endif %}
SELECT
{{ func }}
WHERE
EXISTS(
SELECT
1
FROM
{{ target }}
LIMIT
1
)
{% else %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: NOOP",
False
) }}
{% endif %}
SELECT
NULL
{% endif %}
{% endmacro %}

{% macro if_data_call_wait() %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% set query %}
SELECT
1
WHERE
EXISTS(
SELECT
1
FROM
{{ model.schema ~ "." ~ model.alias }}
LIMIT
1
) {% endset %}
{% if execute %}
{% set results = run_query(
query
) %}
{% if results %}
{{ log(
"Waiting...",
info = True
) }}

{% set wait_query %}
SELECT
system$wait(
{{ var(
"WAIT",
600
) }}
) {% endset %}
{% do run_query(wait_query) %}
{% else %}
SELECT
NULL;
{% endif %}
{% endif %}
{% endif %}
{% endmacro %}
@ -1,6 +1,6 @@
{% docs __overview__ %}

# Welcome to the Flipside Crypto Avalanche C-Chain Models Documentation!
# Welcome to the Flipside Crypto Avalanche Models Documentation!

## **What does this documentation cover?**
The documentation included here details the design of the Avalanche tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/avalanche-models)
@ -16,35 +16,34 @@ There is more information on how to use dbt docs in the last section of this doc

**Click on the links below to jump to the documentation for each schema.**

### Core Tables (avalanche.core)
### Core Tables

**Dimension Tables:**
- [dim_contracts](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__dim_contracts)
- [dim_labels](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__dim_labels)
**Dimensional Tables**
- [dim_contracts](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.core__dim_contracts)
- [dim_contract_abis](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.core__dim_contract_abis)
- [dim_labels](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.core__dim_labels)

**Fact Tables:**
- [fact_blocks](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__fact_blocks)
- [fact_decoded_event_logs](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__fact_decoded_event_logs)
- [fact_event_logs](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__fact_event_logs)
- [fact_token_transfers](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__fact_token_transfers)
- [fact_traces](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__fact_traces)
- [fact_transactions](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__fact_transactions)
- [fact_blocks](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.core__fact_blocks)
- [fact_transactions](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.core__fact_transactions)
- [fact_event_logs](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.core__fact_event_logs)
- [fact_traces](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.core__fact_traces)

**Convenience Tables:**
- [ez_decoded_event_logs](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__ez_decoded_event_logs)
- [ez_native_transfers](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__ez_native_transfers)
- [ez_token_transfers](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.core__ez_token_transfers)
- [ez_decoded_event_logs](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.core__ez_decoded_event_logs)
- [ez_native_transfers](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.core__ez_native_transfers)
- [ez_token_transfers](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.core__ez_token_transfers)

### Price Tables (avalanche.price)
- [dim_asset_metadata](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.price__ez_prices_hourly)
- [dim_asset_metadata](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.price__ez_prices_hourly)

### DeFi Tables (avalanche.defi)
- [dim_dex_liquidity_pools](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.defi__dim_dex_liquidity_pools)
- [ez_dex_swaps](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.defi__ez_dex_swaps)
- [ez_bridge_activity](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.defi__ez_bridge_activity)
- [ez_dex_swaps](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.defi__ez_dex_swaps)
- [dim_dex_liquidity_pools](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.defi__dim_dex_liquidity_pools)
- [ez_lending_borrows](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.defi__ez_lending_borrows)
- [ez_lending_deposits](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.defi__ez_lending_deposits)
- [ez_lending_flashloans](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.defi__ez_lending_flashloans)
@ -52,12 +51,14 @@ There is more information on how to use dbt docs in the last section of this doc
- [ez_lending_repayments](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.defi__ez_lending_repayments)
- [ez_lending_withdraws](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.defi__ez_lending_withdraws)

### Flipside Partner Tables (avalanche.partner_name)

### NFT Tables (avalanche.nft)
- [ez_nft_transfers](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.nft__ez_nft_transfers)
- [ez_nft_sales](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.nft__ez_nft_sales)
- [ez_nft_transfers](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.nft__ez_nft_transfers)

### Stats Tables (avalanche.stats)
- [ez_core_metrics_hourly](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.avalanche_models.stats__ez_core_metrics_hourly)
- [ez_core_metrics_hourly](https://flipsidecrypto.github.io/avalanche-models/#!/model/model.fsc_evm.stats__ez_core_metrics_hourly)

### Dexalot Tables (avalanche.dexalot)

@ -85,18 +86,7 @@ The dimension tables are sourced from a variety of on-chain and off-chain sources

Convenience views (denoted ez_) are a combination of different fact and dimension tables. These views are built to make it easier to query the data.
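An illustrative query against one of the ez_ views (an editorial sketch, not part of the original file; column names assume the standard Flipside EVM schema):

SELECT block_timestamp, from_address, to_address, symbol, amount
FROM avalanche.core.ez_token_transfers
WHERE block_timestamp >= DATEADD('day', -1, SYSDATE())
LIMIT 10;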

## **Contract Decoding**
### Adding a contract for decoding
To add a contract for decoding, please visit [here](https://science.flipsidecrypto.xyz/abi-requestor/).

Assuming the submitted ABI is valid, records will be decoded within 24 hours. If records are not decoded within 24 hours, or for any ABI updates, please submit a ticket within our Discord.

### General Process Overview

The majority of our ABIs have been sourced from Etherscan, and we are constantly asking Etherscan for new ABIs. However, this is not comprehensive, and therefore we must also rely on our users to submit ABIs for decoding.
If we are unable to locate an ABI for the contract from either Etherscan or our users, we will attempt to match the contract to a similar ABI. This is done by comparing the contract bytecode to a list of known contract bytecodes. If we are able to match the contract to a similar ABI, we will decode the contract using the similar ABI. You can see the source of each ABI in the `dim_contract_abis` table within the `abi_source` column.

Once ABIs have been verified, events within the last day of blocks will be decoded within approximately 90 minutes. Events older than 1 day will be decoded within 24 hours in the majority of cases. The exception here is if the contract has a massive number of events, in which case it may take longer.
NOTE: Avalanche is currently operating in it's Testnet phase. Flipside will provide Mainnet data tables once Avalanche Mainnet is deployed.

## **Using dbt docs**
### Navigation
@ -105,7 +95,7 @@ You can use the ```Project``` and ```Database``` navigation tabs on the left sid

### Database Tab

This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the database.
This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the dataavalanche.

### Graph Exploration

@ -119,10 +109,11 @@ Note that you can also right-click on models to interactively filter and explore


### **More information**
- [Flipside](https://flipsidecrypto.xyz)
- [Flipside](https://flipsidecrypto.xyz/)
- [Velocity](https://app.flipsidecrypto.com/velocity?nav=Discover)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/avalanche-models)
- [Query Editor Shortcuts](https://docs.flipsidecrypto.com/velocity/query-editor-shortcuts)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)


{% enddocs %}
@ -1,87 +0,0 @@
{{ config (
    materialized = "ephemeral"
) }}

WITH retry AS (

    SELECT
        contract_address,
        GREATEST(
            latest_call_block,
            latest_event_block
        ) AS block_number,
        total_interaction_count
    FROM
        {{ ref("silver__relevant_contracts") }} r
        LEFT JOIN {{ source(
            'avalanche_silver',
            'verified_abis'
        ) }} v USING (contract_address)
    WHERE
        r.total_interaction_count >= 250 -- high interaction count
        AND GREATEST(
            max_inserted_timestamp_logs,
            max_inserted_timestamp_traces
        ) >= CURRENT_DATE - INTERVAL '30 days' -- recent activity
        AND v.contract_address IS NULL -- no verified abi
        AND r.contract_address NOT IN (
            SELECT
                contract_address
            FROM
                {{ source(
                    'avalanche_bronze_api',
                    'contract_abis'
                ) }}
            WHERE
                _inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
                AND abi_data :data :result :: STRING <> 'Max rate limit reached'
        )
    ORDER BY
        total_interaction_count DESC
    LIMIT
        25
), FINAL AS (
    SELECT
        proxy_address AS contract_address,
        start_block AS block_number
    FROM
        {{ ref("silver__proxies") }} p
        JOIN retry r USING (contract_address)
        LEFT JOIN {{ source(
            'avalanche_silver',
            'verified_abis'
        ) }} v
        ON v.contract_address = p.proxy_address
    WHERE
        v.contract_address IS NULL
        AND p.contract_address NOT IN (
            SELECT
                contract_address
            FROM
                {{ source(
                    'avalanche_bronze_api',
                    'contract_abis'
                ) }}
            WHERE
                _inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
                AND abi_data :data :result :: STRING <> 'Max rate limit reached'
        )
    UNION ALL
    SELECT
        contract_address,
        block_number
    FROM
        retry
)
SELECT
    *
FROM
    FINAL
QUALIFY ROW_NUMBER() OVER (
    PARTITION BY contract_address
    ORDER BY
        block_number DESC
) = 1
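The closing `QUALIFY` keeps one row per contract, preferring the highest block. The same pattern in isolation, on inline data (illustrative values only):

```sql
-- QUALIFY-based dedup: one row per contract_address, keeping the highest block_number.
SELECT contract_address, block_number
FROM (
    VALUES ('0xaaa', 100), ('0xaaa', 250), ('0xbbb', 90)
) AS t (contract_address, block_number)
QUALIFY ROW_NUMBER() OVER (
    PARTITION BY contract_address
    ORDER BY block_number DESC
) = 1; -- returns ('0xaaa', 250) and ('0xbbb', 90)
```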
@ -1,80 +0,0 @@
{{ config(
    materialized = 'incremental',
    unique_key = "contract_address",
    full_refresh = false,
    tags = ['curated']
) }}

WITH base AS (

    SELECT
        contract_address
    FROM
        {{ ref('silver__relevant_contracts') }}
    WHERE
        total_interaction_count >= 100

{% if is_incremental() %}
    AND contract_address NOT IN (
        SELECT
            contract_address
        FROM
            {{ this }}
        WHERE
            abi_data :data :result :: STRING <> 'Max rate limit reached'
    )
{% endif %}

    ORDER BY
        total_interaction_count DESC
    LIMIT
        50
), all_contracts AS (
    SELECT
        contract_address
    FROM
        base
    UNION
    SELECT
        contract_address
    FROM
        {{ ref('_retry_abis') }}
),
row_nos AS (
    SELECT
        contract_address,
        ROW_NUMBER() OVER (
            ORDER BY
                contract_address
        ) AS row_no
    FROM
        all_contracts
),
batched AS ({% for item in range(101) %}
    SELECT
        rn.contract_address,
        live.udf_api(
            'GET',
            CONCAT(
                'https://api.routescan.io/v2/network/mainnet/evm/', '43114', -- avax C-chain ID
                '/etherscan/api?module=contract&action=getabi&address=',
                contract_address, '&apikey=none'
            ),
            OBJECT_CONSTRUCT(
                'Content-Type', 'application/json',
                'fsc-quantum-state', 'livequery'
            ),
            NULL,
            ''
        ) AS abi_data,
        SYSDATE() AS _inserted_timestamp
    FROM
        row_nos rn
    WHERE
        row_no = {{ item }}

    {% if not loop.last %}
    UNION ALL
    {% endif %}
{% endfor %})
SELECT
    contract_address,
    abi_data,
    _inserted_timestamp
FROM
    batched
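The `{% for item in range(101) %}` loop unrolls at dbt compile time into 101 single-row `SELECT`s joined with `UNION ALL`, so each `live.udf_api` call targets exactly one contract. Note that `ROW_NUMBER()` starts at 1, so the `row_no = 0` branch never matches and at most 100 contracts are fetched per run. A runnable toy version of the pattern (inline data, no API call):

```sql
-- Toy version of the compile-time batching: number the rows, then emit one
-- SELECT per slot. In the model above, each slot wraps a live.udf_api call.
WITH row_nos AS (
    SELECT
        contract_address,
        ROW_NUMBER() OVER (ORDER BY contract_address) AS row_no
    FROM (
        VALUES ('0xaaa'), ('0xbbb'), ('0xccc')
    ) AS t (contract_address)
)
SELECT contract_address FROM row_nos WHERE row_no = 1
UNION ALL
SELECT contract_address FROM row_nos WHERE row_no = 2
UNION ALL
SELECT contract_address FROM row_nos WHERE row_no = 3;
```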
@ -1,22 +0,0 @@
version: 2

models:
  - name: bronze_api__contract_abis

    columns:
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 1
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
      - name: CONTRACT_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - VARCHAR
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: "^(0x)[0-9a-fA-F]{40}$"
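The regex test asserts that every `CONTRACT_ADDRESS` is a 0x-prefixed, 40-hex-character string. The equivalent check written directly in Snowflake SQL (a sketch of what `dbt_expectations` compiles to; the rendered relation name may differ):

```sql
-- Rows violating the address format; the dbt test fails if any are returned.
SELECT contract_address
FROM bronze_api__contract_abis
WHERE NOT REGEXP_LIKE(contract_address, '^(0x)[0-9a-fA-F]{40}$');
```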
@ -1,129 +0,0 @@
{{ config(
    materialized = 'incremental',
    unique_key = "contract_address",
    full_refresh = false,
    tags = ['non_realtime']
) }}

WITH base AS (

    SELECT
        contract_address,
        latest_event_block AS latest_block
    FROM
        {{ ref('silver__relevant_contracts') }}
    WHERE
        total_event_count >= 25

{% if is_incremental() %}
    AND contract_address NOT IN (
        SELECT
            contract_address
        FROM
            {{ this }}
    )
{% endif %}

    LIMIT
        500
), function_sigs AS (
    SELECT
        '0x313ce567' AS function_sig,
        'decimals' AS function_name
    UNION
    SELECT
        '0x06fdde03',
        'name'
    UNION
    SELECT
        '0x95d89b41',
        'symbol'
),
all_reads AS (
    SELECT
        *
    FROM
        base
        JOIN function_sigs
        ON 1 = 1
),
ready_reads AS (
    SELECT
        contract_address,
        latest_block,
        function_sig,
        RPAD(
            function_sig,
            64,
            '0'
        ) AS input,
        utils.udf_json_rpc_call(
            'eth_call',
            [{'to': contract_address, 'from': null, 'data': input}, utils.udf_int_to_hex(latest_block)],
            concat_ws(
                '-',
                contract_address,
                input,
                latest_block
            )
        ) AS rpc_request
    FROM
        all_reads
),
batch_reads AS (
    SELECT
        ARRAY_AGG(rpc_request) AS batch_rpc_request
    FROM
        ready_reads
),
node_call AS (
    SELECT
        *,
        live.udf_api(
            'POST',
            CONCAT(
                '{service}',
                '/',
                '{Authentication}',
                '/ext/bc/C/rpc'
            ),
            {},
            batch_rpc_request,
            'Vault/prod/avalanche/quicknode/mainnet'
        ) AS response
    FROM
        batch_reads
    WHERE
        EXISTS (
            SELECT
                1
            FROM
                ready_reads
            LIMIT
                1
        )
), flat_responses AS (
    SELECT
        VALUE :id :: STRING AS call_id,
        VALUE :result :: STRING AS read_result
    FROM
        node_call,
        LATERAL FLATTEN (
            input => response :data
        )
)
SELECT
    SPLIT_PART(
        call_id,
        '-',
        1
    ) AS contract_address,
    SPLIT_PART(
        call_id,
        '-',
        3
    ) AS block_number,
    LEFT(SPLIT_PART(call_id, '-', 2), 10) AS function_sig,
    NULL AS function_input,
    read_result,
    SYSDATE() :: TIMESTAMP AS _inserted_timestamp
FROM
    flat_responses
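Each `read_result` is ABI-encoded hex; for the `decimals()` read (selector `0x313ce567`) the 32-byte return word is just a small integer. A hedged sketch of downstream decoding, assuming the Flipside `utils.udf_hex_to_int` helper is available (the rendered relation name may also differ):

```sql
-- Decode the decimals() read: a 32-byte hex word holding a small integer.
SELECT
    contract_address,
    utils.udf_hex_to_int(read_result) :: INT AS decimals -- udf_hex_to_int assumed available
FROM bronze_api__token_reads
WHERE function_sig = '0x313ce567'
    AND read_result IS NOT NULL;
```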
@ -1,18 +0,0 @@
version: 2

models:
  - name: bronze_api__token_reads
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - CONTRACT_ADDRESS
            - FUNCTION_SIG
    columns:
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 1
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
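The `unique_combination_of_columns` test asserts one row per `(CONTRACT_ADDRESS, FUNCTION_SIG)` pair; expressed directly in SQL (a sketch of what `dbt_utils` compiles to):

```sql
-- Key pairs appearing more than once; the test fails if any rows are returned.
SELECT contract_address, function_sig, COUNT(*) AS n
FROM bronze_api__token_reads
GROUP BY contract_address, function_sig
HAVING COUNT(*) > 1;
```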
@ -1,25 +0,0 @@
{{ config(
    materialized = 'view'
) }}

SELECT
    system_created_at,
    insert_date,
    blockchain,
    address,
    creator,
    label_type,
    label_subtype,
    address_name,
    project_name,
    _is_deleted,
    modified_timestamp,
    labels_combined_id
FROM
    {{ source(
        'crosschain_silver',
        'labels_combined'
    ) }}
WHERE
    blockchain = 'avalanche'
    AND address LIKE '0x%'
@ -1,26 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    asset_id,
    symbol,
    NAME,
    decimals,
    blockchain,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_native_asset_metadata_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_native_asset_metadata'
    ) }}
WHERE
    blockchain = 'avalanche'
    AND symbol = 'AVAX'
@ -1,29 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    HOUR,
    asset_id,
    symbol,
    NAME,
    decimals,
    price,
    blockchain,
    is_imputed,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_native_prices_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_native_prices'
    ) }}
WHERE
    blockchain = 'avalanche'
    AND symbol = 'AVAX'
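As a usage sketch, the hourly feed rolls up naturally to daily averages (the relation name below is illustrative; it depends on how this model is deployed):

```sql
-- Daily average AVAX/USD price from the hourly feed.
SELECT
    DATE_TRUNC('day', HOUR) AS price_date,
    AVG(price) AS avg_price_usd
FROM silver.complete_native_prices -- illustrative relation name
WHERE blockchain = 'avalanche'
    AND symbol = 'AVAX'
GROUP BY 1
ORDER BY 1 DESC;
```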
@ -1,28 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    asset_id,
    token_address,
    NAME,
    symbol,
    platform,
    platform_id,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_provider_asset_metadata_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_provider_asset_metadata'
    ) }}
WHERE
    platform IN (
        'Avalanche',
        'Avalanche C-Chain'
    ) -- platforms specific to Avalanche
@ -1,24 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    asset_id,
    recorded_hour,
    OPEN,
    high,
    low,
    CLOSE,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_provider_prices_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_provider_prices'
    ) }}
    -- prices for all ids
@ -1,28 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    token_address,
    asset_id,
    symbol,
    NAME,
    decimals,
    blockchain,
    blockchain_name,
    blockchain_id,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_token_asset_metadata_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_token_asset_metadata'
    ) }}
WHERE
    blockchain = 'avalanche'
@ -1,31 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    HOUR,
    token_address,
    asset_id,
    symbol,
    NAME,
    decimals,
    price,
    blockchain,
    blockchain_name,
    blockchain_id,
    is_imputed,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_token_prices_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_token_prices'
    ) }}
WHERE
    blockchain = 'avalanche'
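A hedged example of the kind of lookup this view supports (relation name and token address are illustrative):

```sql
-- Latest hourly price row for one Avalanche token.
SELECT HOUR, symbol, price
FROM silver.complete_token_prices -- illustrative relation name
WHERE blockchain = 'avalanche'
    AND token_address = '0xb97ef9ef8734c71904d8002f8b6bc66dd9c48a6e' -- illustrative address
ORDER BY HOUR DESC
LIMIT 1;
```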
@ -1,7 +1,7 @@
{{ config (
    materialized = 'view'
) }}
{{ fsc_evm.streamline_external_table_query(
{{ fsc_utils.streamline_external_table_query_v2(
    model = "dexalot_traces",
    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}
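The `partition_function` pulls the leading block number out of the fourth path segment of each staged file name. A quick sanity check of the expression on a hypothetical file name:

```sql
-- Hypothetical file name; the expression extracts 12345 from the 4th path segment.
SELECT CAST(
    SPLIT_PART(SPLIT_PART('streamline/dexalot/traces/12345_000001.json', '/', 4), '_', 1)
    AS INTEGER
) AS partition_key; -- 12345
```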
@ -31,6 +31,9 @@ SELECT
    state_root,
    transactions_root,
    logs_bloom,
    blocks_id AS fact_blocks_id,
    inserted_timestamp,
    modified_timestamp,
    OBJECT_CONSTRUCT(
        'baseFeePerGas',
        base_fee_per_gas,
@ -70,12 +73,9 @@ SELECT
    transactions_root,
    'uncles',
    uncles
    ) AS block_header_json,
    blocks_id AS fact_blocks_id,
    inserted_timestamp,
    modified_timestamp,
    'dexalot' AS blockchain,
    hash
    ) AS block_header_json, --deprecate
    'dexalot' AS blockchain, --deprecate
    hash --deprecate
FROM
    {{ ref('silver_dexalot__blocks') }} A
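`OBJECT_CONSTRUCT` packs alternating key/value arguments into a single VARIANT object, which is how `block_header_json` is assembled above. A minimal illustration (values invented):

```sql
-- OBJECT_CONSTRUCT takes alternating keys and values and returns a VARIANT object.
SELECT OBJECT_CONSTRUCT(
    'baseFeePerGas', 25000000000,
    'uncles', ARRAY_CONSTRUCT()
) AS block_header_json; -- {"baseFeePerGas": 25000000000, "uncles": []}
```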
@ -10,8 +10,6 @@ models:
        description: '{{ doc("evm_block_timestamp") }}'
      - name: NETWORK
        description: '{{ doc("evm_network") }}'
      - name: BLOCKCHAIN
        description: '{{ doc("evm_column_deprecation_notice_blockchain") }}'
      - name: MINER
        description: '{{ doc("evm_miner") }}'
      - name: NONCE
@ -32,8 +30,6 @@ models:
        description: '{{ doc("evm_base_fee_per_gas") }}'
      - name: BLOCK_HASH
        description: '{{ doc("evm_blocks_hash") }}'
      - name: HASH
        description: '{{ doc("evm_column_deprecation_notice_hash") }}'
      - name: PARENT_HASH
        description: '{{ doc("evm_parent_hash") }}'
      - name: RECEIPTS_ROOT
@ -50,8 +46,6 @@ models:
        description: '{{ doc("evm_transactions_root") }}'
      - name: LOGS_BLOOM
        description: '{{ doc("evm_logs_bloom") }}'
      - name: BLOCK_HEADER_JSON
        description: '{{ doc("evm_column_deprecation_notice_block_header_json") }}'
      - name: BASE_FEE_PER_GAS
        description: '{{ doc("evm_base_fee_per_gas") }}'
      - name: FACT_BLOCKS_ID
@ -28,8 +28,7 @@ SELECT
    logs_id AS fact_event_logs_id,
    inserted_timestamp,
    modified_timestamp,
    tx_status,
    -- deprecate
    tx_status, -- deprecate
    _log_id -- deprecate
FROM
    {{ ref('silver_dexalot__logs') }}
@ -28,10 +28,6 @@ models:
        description: '{{ doc("evm_logs_data") }}'
      - name: EVENT_REMOVED
        description: '{{ doc("evm_event_removed") }}'
      - name: _LOG_ID
        description: '{{ doc("evm_column_deprecation_notice_log_id") }}'
      - name: TX_STATUS
        description: '{{ doc("evm_column_deprecation_notice_tx_status") }}'
      - name: TX_SUCCEEDED
        description: '{{ doc("evm_tx_succeeded") }}'
      - name: ORIGIN_FUNCTION_SIGNATURE
@ -23,9 +23,9 @@ models:
      - name: VALUE
        description: '{{ doc("dexalot_traces_value") }}'
      - name: VALUE_PRECISE_RAW
        description: '{{ doc("precise_amount_unadjusted") }}'
        description: '{{ doc("evm_precise_amount_unadjusted") }}'
      - name: VALUE_PRECISE
        description: '{{ doc("precise_amount_adjusted") }}'
        description: '{{ doc("evm_precise_amount_adjusted") }}'
      - name: VALUE_HEX
        description: '{{ doc("dexalot_traces_value_hex") }}'
      - name: GAS
@ -55,8 +55,8 @@ models:
      - name: TRACE_INDEX
        description: '{{ doc("dexalot_traces_trace_index") }}'
      - name: FACT_TRACES_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
@ -37,8 +37,7 @@ SELECT
    transactions_id AS fact_transactions_id,
    inserted_timestamp,
    modified_timestamp,
    block_hash,
    -- deprecate
    block_hash, -- deprecate
    position -- deprecate
FROM
    {{ ref('silver_dexalot__transactions') }}
@ -8,14 +8,10 @@ models:
        description: '{{ doc("dexalot_block_number") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("dexalot_block_timestamp") }}'
      - name: BLOCK_HASH
        description: '{{ doc("evm_column_deprecation_notice_block_hash") }}'
      - name: TX_HASH
        description: '{{ doc("dexalot_tx_hash") }}'
      - name: NONCE
        description: '{{ doc("dexalot_tx_nonce") }}'
      - name: POSITION
        description: '{{ doc("evm_column_deprecation_notice_position") }}'
      - name: TX_POSITION
        description: '{{ doc("dexalot_tx_position") }}'
      - name: FROM_ADDRESS
@ -25,13 +21,13 @@ models:
      - name: VALUE
        description: '{{ doc("dexalot_value") }}'
      - name: VALUE_PRECISE_RAW
        description: '{{ doc("precise_amount_unadjusted") }}'
        description: '{{ doc("evm_precise_amount_unadjusted") }}'
      - name: VALUE_PRECISE
        description: '{{ doc("precise_amount_adjusted") }}'
        description: '{{ doc("evm_precise_amount_adjusted") }}'
      - name: TX_FEE
        description: '{{ doc("dexalot_tx_fee") }}'
      - name: TX_FEE_PRECISE
        description: '{{ doc("tx_fee_precise") }}'
        description: '{{ doc("evm_tx_fee_precise") }}'
      - name: GAS_PRICE
        description: '{{ doc("dexalot_tx_gas_price") }}'
      - name: EFFECTIVE_GAS_PRICE
@ -59,8 +55,8 @@ models:
      - name: V
        description: The v value of the transaction signature.
      - name: FACT_TRANSACTIONS_ID
        description: '{{ doc("pk") }}'
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
        description: '{{ doc("evm_modified_timestamp") }}'
@ -11,10 +11,7 @@ SELECT
    ''
    ) AS block_number_hex
FROM
    {{ source(
        'crosschain_silver',
        'number_sequence'
    ) }}
    {{ ref('admin__number_sequence') }}
WHERE
    _id <= (
        SELECT
@ -1,4 +0,0 @@
{% docs borrow_symbol %}

The symbol of the asset or collateral that is paid or received, depending on the action.

{% enddocs %}
@ -1,9 +0,0 @@
{% docs borrow_action %}

The action that the user is taking.
Borrow: the user is borrowing an asset.
Repay: the user is repaying an asset borrowed in a previous loan.
Add collateral: the user is depositing collateral for their loan. This sometimes happens in the same transaction as the borrow and sometimes in a separate transaction.
Remove collateral: the user is withdrawing collateral. This sometimes happens in the same transaction as the borrow and sometimes in a separate transaction.

{% enddocs %}
@ -1,9 +0,0 @@
{% docs borrow_amount %}

The meaning depends on the action:
Borrow: the amount of the asset that the user is borrowing.
Repay: the amount of the asset that the user is repaying.
Add collateral: the amount of collateral that the user is depositing.
Remove collateral: the amount of collateral that the user is withdrawing.

{% enddocs %}
@ -1,8 +0,0 @@
{% docs borrow_amount_usd %}

The meaning depends on the action:
Borrow: the USD amount of the asset that the user is borrowing.
Repay: the USD amount of the asset that the user is repaying.
Add collateral: the USD amount of collateral that the user is depositing.
Remove collateral: the USD amount of collateral that the user is withdrawing.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs borrow_asset %}

The address of the asset or collateral token that is being borrowed, repaid, deposited, etc., depending on the action.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs collateral_address %}

The address of the asset that is used as collateral when borrowing funds.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs collateral_symbol %}

The symbol of the asset that is used as collateral when borrowing funds.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_depositor %}

The address of the user who is depositing for lending or withdrawing, depending on the action.

{% enddocs %}
@ -1,4 +0,0 @@
{% docs lending_asset_address %}

The address of the asset in the token pair. This asset is either deposited or withdrawn for lending purposes.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_pool_address %}

The address of the lending pool. For Sushi this is the address of the Kashi pair.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_symbol %}

The symbol of the asset that is lent or withdrawn, depending on the action.

{% enddocs %}
@ -1,7 +0,0 @@
{% docs lending_action %}

The action that the user is taking.
Deposit: the user is depositing funds to be used for lending.
Withdraw: the user has changed their mind and is no longer willing to lend, so they withdraw their asset.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_amount %}

The amount of the asset that the user is depositing or withdrawing, depending on the action.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_amount_usd %}

The USD amount of the asset that the user is depositing or withdrawing, depending on the action.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_asset %}

The address of the asset (token) that is being deposited or withdrawn, depending on the action.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_borrower_is_a_contract %}

'Yes' if the depositor of collateral is a contract; 'No' if it is a regular address.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_lender_is_a_contract %}

'Yes' if the depositor is a contract; 'No' if it is a regular address.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_lending_pool %}

The name of the lending pool.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_origin_from_address %}

The address of the user who initiates the transaction.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_origin_to_address %}

The address that the depositor interacts with to initiate the transaction. It belongs to the lending platform or routes the transaction there.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs avax_block_header_json %}

This JSON column contains the block header details.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs avax_blockchain %}

The blockchain on which transactions are being confirmed.

{% enddocs %}