Mirror of https://github.com/FlipsideCrypto/bsc-models.git (synced 2026-02-06 11:46:47 +00:00)
AN-5994/bsc-migration (#330)
* initial set up
* macros and workflows
* wh
* defi and nft tags
* sl
* docs
* package
* package
* curated tweaks
Parent: 31755f81ea
Commit: eb4a3c2e17
27 .github/workflows/dbt_alter_all_gha_tasks.yml (vendored, new file)
@@ -0,0 +1,27 @@
name: dbt_alter_all_gha_tasks
run-name: dbt_alter_all_gha_tasks

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      task_action:
        type: choice
        description: Action to perform on all tasks
        required: true
        options:
          - RESUME
          - SUSPEND
        default: RESUME

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_all_gha_tasks.yml@pre-release/v4-beta
    with:
      task_action: ${{ inputs.task_action }}
      target: prod
    secrets: inherit
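A workflow_dispatch trigger like the one above can be kicked off from the GitHub CLI as well as the Actions UI; a minimal sketch, assuming an authenticated `gh` session inside a checkout of this repo:

    # Suspend all Snowflake GHA tasks via the workflow above.
    # SUSPEND/RESUME are the two choices declared in its inputs.
    gh workflow run dbt_alter_all_gha_tasks.yml --ref main -f task_action=SUSPEND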
53 .github/workflows/dbt_alter_gha_task.yml (vendored, deleted)
@@ -1,53 +0,0 @@
name: dbt_alter_gha_task
run-name: dbt_alter_gha_task

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      workflow_name:
        type: string
        description: Name of the workflow to perform the action on, no .yml extension
        required: true
      task_action:
        type: choice
        description: Action to perform
        required: true
        options:
          - SUSPEND
          - RESUME
        default: SUSPEND

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
    with:
      workflow_name: |
        ${{ inputs.workflow_name }}
      task_action: |
        ${{ inputs.task_action }}
      environment: workflow_prod
    secrets: inherit

  notify-failure:
    needs: [called_workflow_template]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
32 .github/workflows/dbt_alter_gha_tasks.yml (vendored, new file)
@@ -0,0 +1,32 @@
name: dbt_alter_gha_tasks
run-name: dbt_alter_gha_tasks

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      workflow_name:
        type: string
        description: Name of the workflow to perform the action on, no .yml extension
        required: true
      task_action:
        type: choice
        description: Action to perform
        required: true
        options:
          - SUSPEND
          - RESUME
        default: SUSPEND

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_gha_tasks.yml@pre-release/v4-beta
    with:
      workflow_name: ${{ inputs.workflow_name }}
      task_action: ${{ inputs.task_action }}
      target: prod
    secrets: inherit
19 .github/workflows/dbt_deploy_new_workflows.yml (vendored, new file)
@@ -0,0 +1,19 @@
name: dbt_deploy_new_workflows
run-name: dbt_deploy_new_workflows

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Deploy New Github Actions
      command: |
        make deploy_new_gha_tasks DBT_TARGET=prod
    secrets: inherit
71 .github/workflows/dbt_docs_update.yml (vendored)
@@ -5,75 +5,10 @@ on:
    branches:
      - "main"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: refresh ddl for datashare
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
      - name: checkout docs branch
        run: |
          git checkout -B docs origin/main
      - name: generate dbt docs
        run: dbt docs generate -t prod

      - name: move files to docs directory
        run: |
          mkdir -p ./docs
          cp target/{catalog.json,manifest.json,index.html} docs/
      - name: clean up target directory
        run: dbt clean

      - name: check for changes
        run: git status

      - name: stage changed files
        run: git add .

      - name: commit changed files
        run: |
          git config user.email "abc@xyz"
          git config user.name "github-actions"
          git commit -am "Auto-update docs"
      - name: push changes to docs
        run: |
          git push -f --set-upstream origin docs

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_docs_update.yml@pre-release/v4-beta
    secrets: inherit
30 .github/workflows/dbt_integration_test.yml (vendored)
@@ -3,38 +3,20 @@ run-name: ${{ github.event.inputs.branch }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        required: true
        type: string
        required: true

concurrency: ${{ github.workflow }}

jobs:
  prepare_vars:
    runs-on: ubuntu-latest
    environment:
      name: ${{ inputs.environment }}
    outputs:
      warehouse: ${{ steps.set_outputs.outputs.warehouse }}
    steps:
      - name: Set warehouse output
        id: set_outputs
        run: |
          echo "warehouse=${{ vars.WAREHOUSE }}" >> $GITHUB_OUTPUT
  called_workflow_template:
    needs: prepare_vars
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_integration_test.yml@pre-release/v4-beta
    with:
      command: >
      target: ${{ inputs.environment }}
      command: |
        dbt test --selector 'integration_tests'
      environment: ${{ inputs.environment }}
      warehouse: ${{ needs.prepare_vars.outputs.warehouse }}
    secrets: inherit

  notify-failure:
    needs: [called_workflow_template]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
56 .github/workflows/dbt_run_abi_refresh.yml (vendored, deleted)
@@ -1,56 +0,0 @@
name: dbt_run_abi_refresh
run-name: dbt_run_abi_refresh

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Update ABI models
        run: |
          dbt run -m "bsc_models,tag:abis"

      - name: Kick off decoded logs history, if there are new ABIs from users
        run: |
          dbt run-operation run_decoded_logs_history

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
51 .github/workflows/dbt_run_adhoc.yml (vendored)
@@ -1,12 +1,12 @@
name: dbt_run_adhoc
run-name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
      target:
        type: choice
        description: DBT Run Environment
        required: true
@@ -26,49 +26,18 @@ on:
        default: DBT
      dbt_command:
        type: string
        description: "DBT Run Command"
        description: 'DBT Run Command'
        required: true

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      warehouse: ${{ inputs.warehouse }}
      target: ${{ inputs.target }}
      command_name: Run DBT Command
      command: ${{ inputs.dbt_command }}
    secrets: inherit
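The `dbt_command` input above is free-form shell that is handed straight to the template's `command`; an illustrative value (the model selection shown is hypothetical):

    # Example ad hoc invocation supplied through the dbt_command input:
    dbt run -m "fsc_evm,tag:curated" --full-refresh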
78 .github/workflows/dbt_run_dev_refresh.yml (vendored)
@@ -3,81 +3,13 @@ run-name: dbt_run_dev_refresh

on:
  workflow_dispatch:
  schedule:
    # Runs "at 1:01 UTC" (see https://crontab.guru)
    - cron: "1 1 * * 1"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs_refresh:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run-operation run_sp_create_prod_clone

  notify-failure:
    needs: [run_dbt_jobs_refresh]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

  run_dbt_jobs_udfs:
    runs-on: ubuntu-latest
    needs: run_dbt_jobs_refresh
    environment:
      name: workflow_dev

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Recreate UDFs
        run: |
          dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
          dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev

  notify-failure2:
    needs: [run_dbt_jobs_udfs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_dev_refresh.yml@pre-release/v4-beta
    secrets: inherit
49 .github/workflows/dbt_run_full_observability.yml (vendored)
@@ -3,50 +3,21 @@ run-name: dbt_run_full_observability

on:
  workflow_dispatch:
  schedule:
    # Runs "At 08:00 on day-of-month 1." (see https://crontab.guru)
    - cron: "0 8 1 * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod_2xl

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"
      warehouse: DBT_EMERGENCY
      command_name: Run Observability Models
      command: |
        dbt run --threads 2 --vars '{"MAIN_OBSERV_FULL_TEST_ENABLED":True}' -m "fsc_evm,tag:observability"
    secrets: inherit

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "bsc_models,tag:observability"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
49 .github/workflows/dbt_run_heal_models.yml (vendored)
@@ -3,50 +3,17 @@ run-name: dbt_run_heal_models

on:
  workflow_dispatch:
  schedule:
    # Runs at 04:10 on Wednesday (see https://crontab.guru)
    - cron: "10 4 * * 3"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "bsc_models,tag:heal" --vars '{"HEAL_MODEL":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      command_name: Run Heal Models
      command: |
        dbt run -m "$PROJECT_NAME,tag:heal" --vars '{"HEAL_MODEL":True}'
    secrets: inherit
59 .github/workflows/dbt_run_operation_reorg.yml (vendored, deleted)
@@ -1,59 +0,0 @@
name: dbt_run_operation_reorg
run-name: dbt_run_operation_reorg

on:
  workflow_dispatch:
  schedule:
    # Runs at minute 25 every Sunday (see https://crontab.guru)
    - cron: "25 0 * * 0"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: List reorg models
        id: list_models
        run: |
          reorg_model_list=$(dbt list --select "bsc_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
          echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT

      - name: Execute block_reorg macro
        run: |
          dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '169'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -1,51 +0,0 @@
name: dbt_run_overflowed_receipts
run-name: dbt_run_overflowed_receipts

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "bsc_models,tag:overflowed_receipts" --vars '{"OVERFLOWED_RECEIPTS":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
51 .github/workflows/dbt_run_overflowed_traces.yml (vendored, deleted)
@@ -1,51 +0,0 @@
name: dbt_run_overflowed_traces
run-name: dbt_run_overflowed_traces

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "bsc_models,tag:overflowed_traces" --vars '{"OVERFLOWED_TRACES":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
22 .github/workflows/dbt_run_scheduled_abis.yml (vendored, new file)
@@ -0,0 +1,22 @@
name: dbt_run_scheduled_abis
run-name: dbt_run_scheduled_abis

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run ABI Models
      command: |
        dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:gold,tag:abis"
      command_name_2: Kick off decoded logs history, if there are new user submitted ABIs
      command_2: |
        dbt run-operation fsc_evm.run_decoded_logs_history
    secrets: inherit
44 .github/workflows/dbt_run_scheduled_curated.yml (vendored)
@@ -6,46 +6,14 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "bsc_models,tag:curated"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      command_name: Run Curated Models
      command: |
        dbt run -m "$PROJECT_NAME,tag:curated" "fsc_evm,tag:curated"
    secrets: inherit
19 .github/workflows/dbt_run_scheduled_decoder.yml (vendored, new file)
@@ -0,0 +1,19 @@
name: dbt_run_scheduled_decoder
run-name: dbt_run_scheduled_decoder

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Decoder Models
      command: |
        dbt run -m "fsc_evm,tag:bronze,tag:decoded_logs" "fsc_evm,tag:silver,tag:decoded_logs" "fsc_evm,tag:gold,tag:decoded_logs"
    secrets: inherit
19 .github/workflows/dbt_run_scheduled_main.yml (vendored, new file)
@@ -0,0 +1,19 @@
name: dbt_run_scheduled_main
run-name: dbt_run_scheduled_main

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Main Models
      command: |
        dbt run -m "fsc_evm,tag:bronze,tag:core" "fsc_evm,tag:silver,tag:core" "fsc_evm,tag:gold,tag:core" "fsc_evm,tag:silver,tag:prices" "fsc_evm,tag:gold,tag:prices" "fsc_evm,tag:silver,tag:labels" "fsc_evm,tag:gold,tag:labels" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
    secrets: inherit
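Worth noting for readers of the long selector above: space-separated arguments to `-m` form a union, so a single dbt invocation covers the core, prices, labels, and streamline selections in one pass, and `--vars` applies to the whole run. The same pattern at a smaller scale:

    # Union of two tag selectors plus a run-level variable override:
    dbt run -m "fsc_evm,tag:silver,tag:core" "fsc_evm,tag:gold,tag:core" --vars '{"STREAMLINE_INVOKE_STREAMS": True}'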
@@ -1,51 +0,0 @@
name: dbt_run_scheduled_non_realtime
run-name: dbt_run_scheduled_non_realtime

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "bsc_models,tag:non_realtime" "bsc_models,tag:streamline_decoded_logs_complete" "bsc_models,tag:streamline_decoded_logs_realtime"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
20 .github/workflows/dbt_run_scheduled_scores.yml (vendored, new file)
@@ -0,0 +1,20 @@

name: dbt_run_scheduled_scores
run-name: dbt_run_scheduled_scores

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Scores Models
      command: |
        dbt run -m "fsc_evm,tag:scores"
    secrets: inherit
@@ -6,50 +6,15 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "bsc_models,tag:streamline_core_complete" "bsc_models,tag:streamline_core_realtime" "bsc_models,tag:streamline_core_complete_receipts" "bsc_models,tag:streamline_core_realtime_receipts" "bsc_models,tag:streamline_core_complete_confirm_blocks" "bsc_models,tag:streamline_core_realtime_confirm_blocks"

      - name: Run Chainhead Tests
        run: |
          dbt test -m "bsc_models,tag:chainhead"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      command_name: Run Chainhead Models
      command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:chainhead" "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash"
        dbt test -m "fsc_evm,tag:chainhead"
    secrets: inherit
@@ -1,56 +0,0 @@
name: dbt_run_streamline_decoded_logs_history
run-name: dbt_run_streamline_decoded_logs_history

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Update complete table
        run: |
          dbt run -m "bsc_models,tag:streamline_decoded_logs_complete"

      - name: Decode historical logs
        run: |
          dbt run-operation decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
51 .github/workflows/dbt_run_streamline_decoder.yml (vendored, deleted)
@@ -1,51 +0,0 @@
name: dbt_run_streamline_decoder
run-name: dbt_run_streamline_decoder

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "bsc_models,tag:decoded_logs"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
22 .github/workflows/dbt_run_streamline_decoder_history.yml (vendored, new file)
@@ -0,0 +1,22 @@
name: dbt_run_streamline_decoder_history
run-name: dbt_run_streamline_decoder_history

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run Decoder Complete
      command: |
        dbt run -m "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete"
      command_name_2: Run Streamline Decoder History
      command_2: |
        dbt run-operation fsc_evm.decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
    secrets: inherit
19 .github/workflows/dbt_run_streamline_history.yml (vendored, new file)
@@ -0,0 +1,19 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history

on:
  workflow_dispatch:
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      command_name: Run History Models
      command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:history" --exclude "fsc_evm,tag:receipts_by_hash"
    secrets: inherit
@@ -1,76 +0,0 @@
name: dbt_run_streamline_history_adhoc
run-name: dbt_run_streamline_history_adhoc

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        type: choice
        description: DBT Run Environment
        required: true
        options:
          - dev
          - prod
          - prod_backfill
        default: dev
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
        default: DBT
      dbt_command:
        type: choice
        description: "DBT Run Command"
        required: true
        options:
          - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "bsc_models,tag:streamline_core_complete" "bsc_models,tag:streamline_core_history" "bsc_models,tag:streamline_core_complete_receipts" "bsc_models,tag:streamline_core_history_receipts" "bsc_models,tag:streamline_core_complete_confirm_blocks" "bsc_models,tag:streamline_core_history_confirm_blocks"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
53 .github/workflows/dbt_test_daily.yml (vendored)
@@ -3,50 +3,21 @@ run-name: dbt_test_daily

on:
  workflow_dispatch:
  schedule:
    # Runs "at 9:00 UTC" (see https://crontab.guru)
    - cron: "0 9 * * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt test --exclude "bsc_models,tag:full_test" "bsc_models,tag:recent_test" "bsc_models,tag:gha_tasks" livequery_models

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      target: test
      command_name: Build Daily Testing Views
      command: |
        dbt run -m "fsc_evm,tag:daily_test"
      command_name_2: Run Daily Tests (all tests excluding full, recent and misc. others)
      command_2: |
        dbt test --exclude "fsc_evm,tag:full_test" "fsc_evm,tag:recent_test" "fsc_evm,tag:gha_tasks" livequery_models
    secrets: inherit
47 .github/workflows/dbt_test_intraday.yml (vendored)
@@ -6,47 +6,16 @@ on:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "bsc_models,tag:observability"
          dbt test -m "bsc_models,tag:recent_test"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      target: test
      command_name: Run Observability & Recent Tests
      command: |
        dbt run -m "fsc_evm,tag:observability"
        dbt test -m "fsc_evm,tag:recent_test"
    secrets: inherit
53 .github/workflows/dbt_test_monthly.yml (vendored)
@@ -3,50 +3,21 @@ run-name: dbt_test_monthly

on:
  workflow_dispatch:
  schedule:
    # Runs "28th of month at 7AM" (see https://crontab.guru)
    - cron: "0 7 28 * *"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
    branches:
      - "main"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod_2xl

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
  called_workflow_template:
    uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
    with:
      python-version: "3.10"
      cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt test -m "bsc_models,tag:full_test"

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      warehouse: DBT_EMERGENCY
      command_name: Build Full Testing Views
      command: |
        dbt run -m "fsc_evm,tag:full_test"
      command_name_2: Run Full Tests
      command_2: |
        dbt test -m "fsc_evm,tag:full_test"
    secrets: inherit
27 .github/workflows/slack_notify.yml (vendored, deleted)
@@ -1,27 +0,0 @@
name: Slack Notification
on:
  workflow_call:
    secrets:
      SLACK_WEBHOOK_URL:
        required: true

jobs:
  notify:
    runs-on: ubuntu-latest
    environment: workflow_prod
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Install dependencies
        run: pip install requests

      - name: Send Slack notification
        run: python python/slack_alert.py
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
2 .gitignore (vendored)
@@ -18,4 +18,4 @@ dbt-env/
.env
.user.yml

package-lock.yml
/package-lock.yml
109 Makefile (new file)
@@ -0,0 +1,109 @@
DBT_TARGET ?= dev
RECEIPTS_BY_HASH_ENABLED ?= false

cleanup_time:
	@set -e; \
	rm -f package-lock.yml && dbt clean && dbt deps

deploy_gha_workflows_table:
	@set -e; \
	echo "Collecting workflow names..." ; \
	WORKFLOW_VALUES="" ; \
	for file in $$(find .github/workflows -name "*.yml" -type f); do \
		filename=$$(basename "$$file" .yml) ; \
		if [ -z "$$WORKFLOW_VALUES" ]; then \
			WORKFLOW_VALUES="('$$filename')" ; \
		else \
			WORKFLOW_VALUES="$$WORKFLOW_VALUES,('$$filename')" ; \
		fi ; \
	done ; \
	echo "Found workflows: $$WORKFLOW_VALUES" ; \
	dbt run-operation create_workflow_table --args "{\"workflow_values\": \"$$WORKFLOW_VALUES\"}" -t $(DBT_TARGET)

deploy_gha_tasks:
	@set -e; \
	make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
	dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET); \
	dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
	dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)

deploy_new_gha_tasks:
	@set -e; \
	make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
	dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
	dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)

deploy_livequery:
	@set -e; \
	dbt run-operation fsc_evm.drop_livequery_schemas --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
	dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
	dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)

deploy_chain_phase_1:
	@set -e; \
	dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
	dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
	dbt run-operation fsc_evm.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
	dbt run-operation fsc_evm.call_sample_rpc_node -t $(DBT_TARGET); \
	if [ "$(DBT_TARGET)" != "prod" ]; then \
		if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
			dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
			dbt test -m "fsc_evm,tag:chainhead"; \
			dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
		else \
			dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
			dbt test -m "fsc_evm,tag:chainhead"; \
			dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
		fi; \
	else \
		if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
			dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
			dbt test -m "fsc_evm,tag:chainhead"; \
			dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
		else \
			dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
			dbt test -m "fsc_evm,tag:chainhead"; \
			dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
		fi; \
	fi; \
	echo "# wait ~10 minutes"; \
	echo "# run deploy_chain_phase_2"

deploy_chain_phase_2:
	@set -e; \
	if [ "$(DBT_TARGET)" != "prod" ]; then \
		dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
		dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
	else \
		dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
		dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
	fi; \
	echo "# wait ~10 minutes"; \
	echo "# run deploy_chain_phase_3"

deploy_chain_phase_3:
	@set -e; \
	if [ "$(DBT_TARGET)" != "prod" ]; then \
		dbt run -m "fsc_evm,tag:phase_2" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
		dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
		dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
	else \
		dbt run -m "fsc_evm,tag:phase_2" -t $(DBT_TARGET); \
		dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
		dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
	fi; \
	echo "# wait ~10 minutes"; \
	echo "# run deploy_chain_phase_4"

deploy_chain_phase_4:
	@set -e; \
	if [ "$(DBT_TARGET)" != "prod" ]; then \
		dbt run -m "fsc_evm,tag:phase_3" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
		dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
	else \
		dbt run -m "fsc_evm,tag:phase_3" -t $(DBT_TARGET); \
		dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
		make deploy_gha_tasks DBT_TARGET=$(DBT_TARGET); \
	fi; \

.PHONY: cleanup_time deploy_gha_workflows_table deploy_gha_tasks deploy_new_gha_tasks deploy_livequery deploy_chain_phase_1 deploy_chain_phase_2 deploy_chain_phase_3 deploy_chain_phase_4
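The phase targets above are designed to run in order, with a pause between phases; a minimal invocation sketch (target and flag values shown are examples, not prescriptions):

    # Phase 1 against the dev target, receipts-by-hash variant enabled:
    make deploy_chain_phase_1 DBT_TARGET=dev RECEIPTS_BY_HASH_ENABLED=true
    # wait ~10 minutes, as the target itself advises, then:
    make deploy_chain_phase_2 DBT_TARGET=dev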
@@ -1,8 +0,0 @@
workflow_name,workflow_schedule
dbt_run_scheduled_non_realtime,"2 * * * *"
dbt_run_scheduled_curated,"0 */6 * * *"
dbt_run_streamline_chainhead,"15,45 * * * *"
dbt_run_streamline_decoder,"20 * * * *"
dbt_test_intraday,"33 */4 * * *"
dbt_run_streamline_decoded_logs_history,"17 16 * * 6"
dbt_run_abi_refresh,"38 23 * * *"
129
129
dbt_project.yml
@ -17,6 +17,8 @@ test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
docs-paths:
["dbt_packages/fsc_evm/doc_descriptions", "models/doc_descriptions", "models"]

target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
@ -24,13 +26,6 @@ clean-targets: # directories to be removed by `dbt clean`
- "dbt_modules"
- "dbt_packages"

models:
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"

tests:
+store_failures: true # all tests

@ -55,6 +50,61 @@ query-comment:
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models

models:
bsc_models: # replace with the name of the chain
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
livequery_models:
+materialized: ephemeral
fsc_evm:
+enabled: false # disable fsc_evm package by default
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
main_package:
+enabled: false # disable top level package by default, enabled subpackages as needed
admin:
+enabled: true
core:
+enabled: true # enable subpackages, as needed
bronze:
+enabled: false
token_reads:
+enabled: true
github_actions:
+enabled: true
labels:
+enabled: true
observability:
+enabled: true
prices:
+enabled: true
utils:
+enabled: true
decoder_package:
+enabled: false
abis:
+enabled: true
decoded_logs:
+enabled: false
gold:
+enabled: true
silver:
+enabled: true
streamline:
+enabled: true
curated_package:
+enabled: false
stats:
+enabled: true
scores_package:
+enabled: false

# In this example config, we tell dbt to build all models in the example/ directory
# as tables. These settings can be overridden in the individual model files
# using the `{{ config(...) }}` macro.
@ -63,19 +113,10 @@ vars:
"dbt_date:time_zone": GMT
STREAMLINE_INVOKE_STREAMS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
STREAMLINE_RUN_HISTORY: False
UPDATE_UDFS_AND_SPS: False
UPDATE_SNOWFLAKE_TAGS: True
WAIT: 0
OBSERV_FULL_TEST: False
OVERFLOWED_RECEIPTS: False
OVERFLOWED_TRACES: False
BALANCES_START: 0
BALANCES_END: 10000000
HEAL_MODEL: False
HEAL_MODELS: []
START_GHA_TASKS: False
RELOAD_TRACES: False

#### STREAMLINE 2.0 BEGIN ####

@ -101,59 +142,3 @@ vars:
- DBT_CLOUD_BSC

#### STREAMLINE 2.0 END ####

#### FSC_EVM BEGIN ####
# Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables

### GLOBAL VARIABLES BEGIN ###
## REQUIRED
GLOBAL_PROD_DB_NAME: 'bsc'
GLOBAL_NODE_SECRET_PATH: 'Vault/prod/bsc/quicknode/mainnet'
GLOBAL_NODE_URL: '{service}/{Authentication}'
GLOBAL_BLOCKS_PER_HOUR: 1200
GLOBAL_USES_STREAMLINE_V1: True

### GLOBAL VARIABLES END ###

### MAIN_PACKAGE VARIABLES BEGIN ###

### CORE ###
## REQUIRED

## OPTIONAL
# GOLD_FULL_REFRESH: True
# SILVER_FULL_REFRESH: True
# BRONZE_FULL_REFRESH: True

# BLOCKS_COMPLETE_FULL_REFRESH: True
# CONFIRM_BLOCKS_COMPLETE_FULL_REFRESH: True
# TRACES_COMPLETE_FULL_REFRESH: True
# RECEIPTS_COMPLETE_FULL_REFRESH: True
# TRANSACTIONS_COMPLETE_FULL_REFRESH: True

# BLOCKS_TRANSACTIONS_REALTIME_TESTING_LIMIT: 3
# BLOCKS_TRANSACTIONS_HISTORY_TESTING_LIMIT: 3
# TRACES_REALTIME_TESTING_LIMIT: 3
# TRACES_HISTORY_TESTING_LIMIT: 3
# ARBTRACE_BLOCK_HISTORY_TESTING_LIMIT: 3
# RECEIPTS_REALTIME_TESTING_LIMIT: 3
# RECEIPTS_HISTORY_TESTING_LIMIT: 3
# CONFIRM_BLOCKS_REALTIME_TESTING_LIMIT: 3
# CONFIRM_BLOCKS_HISTORY_TESTING_LIMIT: 3

# ### MAIN_PACKAGE VARIABLES END ###

# ### DECODER_PACKAGE VARIABLES BEGIN ###

# ## REQUIRED

# ## OPTIONAL

# DECODED_LOGS_COMPLETE_FULL_REFRESH: True

# DECODED_LOGS_REALTIME_TESTING_LIMIT: 3
# DECODED_LOGS_HISTORY_SQL_LIMIT: 1 #limit per monthly range

### DECODER_PACKAGE VARIABLES END ###

#### FSC_EVM END ####
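
For context on how these settings are consumed: dbt resolves each entry under vars: at compile time via the var() function, so downstream models and macros read values like GLOBAL_BLOCKS_PER_HOUR with no wiring beyond this file. A minimal, hypothetical sketch (the model body is illustrative, not part of this commit):

    {# Caps a query at roughly two hours of BSC blocks using the global var #}
    SELECT
        block_number
    FROM
        {{ ref('core__fact_blocks') }}
    ORDER BY
        block_number DESC
    LIMIT
        {{ 2 * var('GLOBAL_BLOCKS_PER_HOUR', 1200) }}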
@ -1,17 +1,7 @@

{% macro create_udfs() %}

{% if var("UPDATE_UDFS_AND_SPS") %}
{% if var("UPDATE_UDFS_AND_SPS", false) %}
{% set sql %}
CREATE schema if NOT EXISTS silver;
{{ create_udtf_get_base_table(
schema = "streamline"
) }}
{{ create_udf_get_chainhead() }}
{{ create_udf_json_rpc() }}
{{ create_udf_api() }}
{{ create_udf_decode_array_string() }}
{{ create_udf_decode_array_object() }}
{{ create_udf_bulk_decode_logs() }}

{% endset %}
{% do run_query(sql) %}
{{- fsc_utils.create_udfs() -}}

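
The guard change above is worth noting: the one-argument form var("UPDATE_UDFS_AND_SPS") raises a compilation error whenever the variable is unset, while the two-argument form falls back to false and compiles cleanly. A minimal sketch of the pattern the new version relies on (the wrapper macro name is hypothetical):

    {% macro maybe_create_udfs() %}
        {# The second argument to var() is the default used when the var is not set #}
        {% if var("UPDATE_UDFS_AND_SPS", false) %}
            {{- fsc_utils.create_udfs() -}}
        {% endif %}
    {% endmacro %}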
@ -1,22 +0,0 @@

{% macro standard_predicate(
input_column = 'block_number'
) -%}
{%- set database_name = target.database -%}
{%- set schema_name = generate_schema_name(
node = model
) -%}
{%- set table_name = generate_alias_name(
node = model
) -%}
{%- set tmp_table_name = table_name ~ '__dbt_tmp' -%}
{%- set full_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ table_name -%}
{%- set full_tmp_table_name = database_name ~ '.' ~ schema_name ~ '.' ~ tmp_table_name -%}
{{ full_table_name }}.{{ input_column }} >= (
SELECT
MIN(
{{ input_column }}
)
FROM
{{ full_tmp_table_name }}
)
{%- endmacro %}
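
For orientation, a predicate macro like the removed standard_predicate is typically passed to dbt's incremental_predicates config so the MERGE only scans target rows at or above the minimum block in the incoming batch. A sketch under that assumption (the model body and unique key are illustrative):

    {{ config(
        materialized = 'incremental',
        unique_key = '_log_id',
        incremental_predicates = [standard_predicate('block_number')]
    ) }}

    SELECT
        block_number,
        _log_id
    FROM
        {{ ref('core__fact_event_logs') }}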
@ -1,124 +0,0 @@

{% macro decoded_logs_history(backfill_mode=false) %}

{%- set params = {
"sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 8000000),
"producer_batch_size": var("DECODED_LOGS_HISTORY_PRODUCER_BATCH_SIZE", 400000),
"worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000)
} -%}

{% set wait_time = var("DECODED_LOGS_HISTORY_WAIT_TIME", 60) %}
{% set find_months_query %}
SELECT
DISTINCT date_trunc('month', block_timestamp)::date as month
FROM {{ ref('core__fact_blocks') }}
ORDER BY month ASC
{% endset %}
{% set results = run_query(find_months_query) %}

{% if execute %}
{% set months = results.columns[0].values() %}

{% for month in months %}
{% set view_name = 'decoded_logs_history_' ~ month.strftime('%Y_%m') %}

{% set create_view_query %}
create or replace view streamline.{{view_name}} as (
WITH target_blocks AS (
SELECT
block_number
FROM {{ ref('core__fact_blocks') }}
WHERE date_trunc('month', block_timestamp) = '{{month}}'::timestamp
),
new_abis AS (
SELECT
abi,
parent_contract_address,
event_signature,
start_block,
end_block
FROM {{ ref('silver__complete_event_abis') }}
{% if not backfill_mode %}
WHERE inserted_timestamp > dateadd('day', -30, sysdate())
{% endif %}
),
existing_logs_to_exclude AS (
SELECT _log_id
FROM {{ ref('streamline__decoded_logs_complete') }} l
INNER JOIN target_blocks b using (block_number)
),
candidate_logs AS (
SELECT
l.block_number,
l.tx_hash,
l.event_index,
l.contract_address,
l.topics,
l.data,
concat(l.tx_hash::string, '-', l.event_index::string) as _log_id
FROM target_blocks b
INNER JOIN {{ ref('core__fact_event_logs') }} l using (block_number)
WHERE l.tx_status = 'SUCCESS' and date_trunc('month', l.block_timestamp) = '{{month}}'::timestamp
)
SELECT
l.block_number,
l._log_id,
A.abi,
OBJECT_CONSTRUCT(
'topics', l.topics,
'data', l.data,
'address', l.contract_address
) AS data
FROM candidate_logs l
INNER JOIN new_abis A
ON A.parent_contract_address = l.contract_address
AND A.event_signature = l.topics[0]::STRING
AND l.block_number BETWEEN A.start_block AND A.end_block
WHERE NOT EXISTS (
SELECT 1
FROM existing_logs_to_exclude e
WHERE e._log_id = l._log_id
)
LIMIT {{ params.sql_limit }}
)
{% endset %}
{# Create the view #}
{% do run_query(create_view_query) %}
{{ log("Created view for month " ~ month.strftime('%Y-%m'), info=True) }}
{% if var("STREAMLINE_INVOKE_STREAMS", false) %}
{# Check if rows exist first #}
{% set check_rows_query %}
SELECT EXISTS(SELECT 1 FROM streamline.{{view_name}} LIMIT 1)
{% endset %}

{% set results = run_query(check_rows_query) %}
{% set has_rows = results.columns[0].values()[0] %}

{% if has_rows %}
{# Invoke streamline, if rows exist to decode #}
{% set decode_query %}
SELECT
streamline.udf_bulk_decode_logs_v2(
PARSE_JSON(
$${ "external_table": "decoded_logs",
"producer_batch_size": {{ params.producer_batch_size }},
"sql_limit": {{ params.sql_limit }},
"sql_source": "{{view_name}}",
"worker_batch_size": {{ params.worker_batch_size }} }$$
)
);
{% endset %}

{% do run_query(decode_query) %}
{{ log("Triggered decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{# Call wait since we actually did some decoding #}
{% do run_query("call system$wait(" ~ wait_time ~ ")") %}
{{ log("Completed wait after decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{% else %}
{{ log("No rows to decode for month " ~ month.strftime('%Y-%m'), info=True) }}
{% endif %}
{% endif %}

{% endfor %}
{% endif %}

{% endmacro %}
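
Since the macro takes no model context, it would have been driven from outside a normal model build. A hedged sketch of one way the backfill path could be triggered (the wrapper macro is hypothetical):

    {% macro backfill_decoded_logs() %}
        {# Rebuild monthly views against the full ABI set, not just the last 30 days #}
        {% do decoded_logs_history(backfill_mode=true) %}
    {% endmacro %}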
@ -1,29 +0,0 @@

{% macro run_decoded_logs_history() %}

{% set blockchain = var('GLOBAL_PROD_DB_NAME','').lower() %}

{% set check_for_new_user_abis_query %}
select 1
from {{ ref('silver__user_verified_abis') }}
where _inserted_timestamp::date = sysdate()::date
and dayname(sysdate()) <> 'Sat'
{% endset %}
{% set results = run_query(check_for_new_user_abis_query) %}
{% if execute %}
{% set new_user_abis = results.columns[0].values()[0] %}

{% if new_user_abis %}
{% set invoke_workflow_query %}
SELECT
github_actions.workflow_dispatches(
'FlipsideCrypto',
'{{ blockchain }}' || '-models',
'dbt_run_streamline_decoded_logs_history.yml',
NULL
)
{% endset %}

{% do run_query(invoke_workflow_query) %}
{% endif %}
{% endif %}
{% endmacro %}
@ -1,101 +0,0 @@

{% macro streamline_external_table_query_decoder(
source_name,
source_version
) %}

{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}

WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP())
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}


{% macro streamline_external_table_query_decoder_fr(
source_name,
source_version
) %}

{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}

WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
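
The partition expressions above assume a dated S3 key layout. Purely as an illustration (the real file layout is not shown in this commit), a path whose sixth segment leads with the block number resolves like this:

    -- Hypothetical file name; segments 3-5 carry the date, segment 6 leads with the block number
    SELECT
        CAST(SPLIT_PART(SPLIT_PART('mainnet/decoded_logs/2024/01/15/19000000_0.json', '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number, -- 19000000
        TO_DATE(CONCAT_WS('-', '2024', '01', '15')) AS _partition_by_created_date; -- 2024-01-15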
@ -1,141 +0,0 @@

{% macro streamline_external_table_query(
source_name,
source_version,
partition_function,
balances,
block_number,
uses_receipts_by_hash
) %}

{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}

WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp

{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}

{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key

{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}

{% macro streamline_external_table_query_fr(
source_name,
source_version,
partition_function,
partition_join_key,
balances,
block_number,
uses_receipts_by_hash
) %}

{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}

WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp

{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}

{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.{{ partition_join_key }}

{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.{{ partition_join_key }}
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
@ -1,36 +0,0 @@

{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %}

{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
{% if model_type != '' %}
{% set model_type = '_' ~ model_type %}
{% endif %}

{%- if flags.WHICH == 'compile' and execute -%}

{{ log("=== Current Variable Settings ===", info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }}
{{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }}
{% if uses_receipts_by_hash %}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}

{{ log("", info=True) }}
{{ log("=== Source Details ===", info=True) }}
{{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }}
{{ log("", info=True) }}

{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}

{%- endif -%}

{% endmacro %}
@ -1,29 +0,0 @@

{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %}

{%- if flags.WHICH == 'compile' and execute -%}

{% if uses_receipts_by_hash %}

{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}

{% endif %}

{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' unique_key = "' ~ config.get('unique_key') ~ '",\n' %}
{% set config_log = config_log ~ ' cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %}
{% set config_log = config_log ~ ' merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' post_hook = "' ~ post_hook ~ '",\n' %}
{% set config_log = config_log ~ ' incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' full_refresh = ' ~ full_refresh_type ~ ',\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}

{%- endif -%}

{% endmacro %}
@ -1,36 +0,0 @@

{% macro log_model_details(vars=false, params=false) %}

{%- if execute -%}
/*
DBT Model Config:
{{ model.config | tojson(indent=2) }}
*/

{% if vars is not false %}

{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( vars | tojson(indent=2), info=True) }}
{% endif %}
/*
Variables:
{{ vars | tojson(indent=2) }}
*/
{% endif %}

{% if params is not false %}

{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( params | tojson(indent=2), info=True) }}
{% endif %}
/*
Parameters:
{{ params | tojson(indent=2) }}
*/
{% endif %}

/*
Raw Code:
{{ model.raw_code }}
*/
{%- endif -%}
{% endmacro %}
@ -1,55 +0,0 @@

{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %}

{%- if flags.WHICH == 'compile' and execute -%}

{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("START_UP_BLOCK: " ~ min_block, info=True) }}
{{ log("", info=True) }}

{{ log("=== API Details ===", info=True) }}

{{ log("NODE_URL: " ~ node_url, info=True) }}
{{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
{{ log("", info=True) }}

{{ log("=== Current Variable Settings ===", info=True) }}

{{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }}
{{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }}
{{ log("", info=True) }}

{{ log("=== RPC Details ===", info=True) }}

{{ log(model_name ~ ": {", info=True) }}
{{ log(" method: '" ~ method ~ "',", info=True) }}
{{ log(" method_params: " ~ method_params, info=True) }}
{{ log("}", info=True) }}
{{ log("", info=True) }}

{% set params_str = streamline_params | tojson %}
{% set params_formatted = params_str | replace('{', '{\n ') | replace('}', '\n }') | replace(', ', ',\n ') %}

{# Clean up the method_params formatting #}
{% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %}

{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' post_hook = fsc_utils.if_data_call_function_v2(\n' %}
{% set config_log = config_log ~ ' func = "streamline.udf_bulk_rest_api_v2",\n' %}
{% set config_log = config_log ~ ' target = "' ~ this.schema ~ '.' ~ this.identifier ~ '",\n' %}
{% set config_log = config_log ~ ' params = ' ~ params_formatted ~ '\n' %}
{% set config_log = config_log ~ ' ),\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}

{%- endif -%}

{% endmacro %}
@ -1,47 +0,0 @@

{% macro set_default_variables_streamline(model_name, model_type) %}

{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%}
{%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%}
{%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%}
{%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%}
{%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime'
else 'ORDER BY partition_key ASC, block_number ASC' -%}
{%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}

{%- set variables = {
'node_url': node_url,
'node_secret_path': node_secret_path,
'model_quantum_state': model_quantum_state,
'testing_limit': testing_limit,
'new_build': new_build,
'order_by_clause': order_by_clause,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}

{{ return(variables) }}

{% endmacro %}

{% macro set_default_variables_bronze(source_name, model_type) %}

{%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION',
"CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)")
-%}
{%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%}
{%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%}
{%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}

{%- set variables = {
'partition_function': partition_function,
'partition_join_key': partition_join_key,
'block_number': block_number,
'balances': balances,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}

{{ return(variables) }}

{% endmacro %}
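
The variable names these macros look up are composed mechanically from the model name and type, which is why the commented-out vars in dbt_project.yml above follow a fixed naming scheme. A quick sketch of the composition:

    {% set model_name = 'blocks_transactions' %}
    {% set model_type = 'realtime' %}
    {# Resolves to BLOCKS_TRANSACTIONS_REALTIME_TESTING_LIMIT #}
    {{ (model_name ~ '_' ~ model_type ~ '_testing_limit').upper() }}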
@ -1,57 +0,0 @@

{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %}

{%- set rpc_config_details = {
"blocks_transactions": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)',
"exploded_key": ['result', 'result.transactions']
},
"receipts_by_hash": {
"method": 'eth_getTransactionReceipt',
"method_params": 'ARRAY_CONSTRUCT(tx_hash)'
},
"receipts": {
"method": 'eth_getBlockReceipts',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))',
"exploded_key": ['result'],
"lambdas": 2

},
"traces": {
"method": 'debug_traceBlockByNumber',
"method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))",
"exploded_key": ['result'],
"lambdas": 2
},
"confirm_blocks": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)'
}
} -%}

{%- set rpc_config = rpc_config_details[model_name.lower()] -%}

{%- set params = {
"external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()),
"sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"worker_batch_size": var(
(model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(),
(2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1))
),
"sql_source": (model_name ~ '_' ~ model_type).lower(),
"method": rpc_config['method'],
"method_params": rpc_config['method_params']
} -%}

{%- if rpc_config.get('exploded_key') is not none -%}
{%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%}
{%- endif -%}

{%- if rpc_config.get('lambdas') is not none -%}
{%- do params.update({"lambdas": rpc_config['lambdas']}) -%}
{%- endif -%}

{{ return(params) }}

{% endmacro %}
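
To make the batch sizing concrete: with GLOBAL_BLOCKS_PER_HOUR: 1200 (as set in dbt_project.yml above) and the default multiplier of 1, the 'traces' entry works out as follows:

    {% set params = set_streamline_parameters('traces', 'realtime') %}
    {# sql_limit           = 2 * 1200 * 1 = 2400
       producer_batch_size = 2 * 1200 * 1 = 2400
       worker_batch_size   = 2400 // 2    = 1200  (lambdas = 2) #}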
@ -1,12 +0,0 @@

{% macro lookback() %}
{% if execute and is_incremental() %}
{% set query %}
SELECT
MAX(_inserted_timestamp) :: DATE - 3
FROM
{{ this }};
{% endset %}
{% set last_week = run_query(query).columns [0] [0] %}
{% do return(last_week) %}
{% endif %}
{% endmacro %}
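
In use, lookback() returned a date three days before the newest _inserted_timestamp in the target table, giving incremental runs a fixed reprocessing window. A sketch under that assumption (the ref shown is illustrative):

    SELECT
        *
    FROM
        {{ ref('silver__receipts') }}
    {% if is_incremental() %}
    WHERE
        _inserted_timestamp :: DATE >= '{{ lookback() }}'
    {% endif %}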
@ -1,17 +0,0 @@

{% macro create_aws_bsc_api() %}
{% if target.name == "prod" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_bsc_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/bsc-api-prod-rolesnowflakeudfsAF733095-1GUY4JJNG9FQL' api_allowed_prefixes = (
'https://u0bch3zf8l.execute-api.us-east-1.amazonaws.com/prod/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% else %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_bsc_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/bsc-api-dev-rolesnowflakeudfsAF733095-1S8A1YYLZ09VX' api_allowed_prefixes = (
'https://q22a7542fk.execute-api.us-east-1.amazonaws.com/dev/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}
@ -1,22 +0,0 @@

{% macro create_udtf_get_base_table(schema) %}
CREATE
OR REPLACE FUNCTION {{ schema }}.udtf_get_base_table(
max_height INTEGER
) returns TABLE (
height NUMBER
) AS $$ WITH base AS (
SELECT
ROW_NUMBER() over (
ORDER BY
SEQ4()
) AS id
FROM
TABLE(GENERATOR(rowcount => 100000000))
)
SELECT
id AS height
FROM
base
WHERE
id <= max_height $$;
{% endmacro %}
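
Once created, the UDTF behaves like a bounded number generator; for example (the height shown is arbitrary):

    -- Returns the integers 1 .. 35000000 as candidate block heights
    SELECT
        height
    FROM
        TABLE(streamline.udtf_get_base_table(35000000));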
@ -1,72 +0,0 @@

{% macro create_udf_get_chainhead() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_get_chainhead() returns variant api_integration = aws_bsc_api AS {% if target.name == "prod" %}
'https://u0bch3zf8l.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
{% else %}
'https://q22a7542fk.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
{%- endif %};
{% endmacro %}

{% macro create_udf_json_rpc() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_json_rpc(
json OBJECT
) returns ARRAY api_integration = aws_bsc_api AS {% if target.name == "prod" %}
'https://u0bch3zf8l.execute-api.us-east-1.amazonaws.com/prod/bulk_get_json_rpc'
{% else %}
'https://q22a7542fk.execute-api.us-east-1.amazonaws.com/dev/bulk_get_json_rpc'
{%- endif %};
{% endmacro %}

{% macro create_udf_rest_api() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_rest_api(
json OBJECT
) returns ARRAY api_integration = aws_bsc_api AS {% if target.name == "prod" %}
'https://u0bch3zf8l.execute-api.us-east-1.amazonaws.com/prod/bulk_get_rest_api'
{% else %}
'https://q22a7542fk.execute-api.us-east-1.amazonaws.com/dev/bulk_get_rest_api'
{%- endif %};
{% endmacro %}

{% macro create_udf_api() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_api(
method VARCHAR,
url VARCHAR,
headers OBJECT,
DATA OBJECT
) returns variant api_integration = aws_bsc_api AS {% if target.name == "prod" %}
'https://u0bch3zf8l.execute-api.us-east-1.amazonaws.com/prod/udf_api'
{% else %}
'https://q22a7542fk.execute-api.us-east-1.amazonaws.com/dev/udf_api'
{%- endif %};
{% endmacro %}

{% macro create_udf_decode_array_string() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_decode(
abi ARRAY,
DATA STRING
) returns ARRAY api_integration = aws_bsc_api AS {% if target.name == "prod" %}
'https://u0bch3zf8l.execute-api.us-east-1.amazonaws.com/prod/decode_function'
{% else %}
'https://q22a7542fk.execute-api.us-east-1.amazonaws.com/dev/decode_function'
{%- endif %};
{% endmacro %}

{% macro create_udf_decode_array_object() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_decode(
abi ARRAY,
DATA OBJECT
) returns ARRAY api_integration = aws_bsc_api AS {% if target.name == "prod" %}
'https://u0bch3zf8l.execute-api.us-east-1.amazonaws.com/prod/decode_log'
{% else %}
'https://q22a7542fk.execute-api.us-east-1.amazonaws.com/dev/decode_log'
{%- endif %};
{% endmacro %}

{% macro create_udf_bulk_decode_logs() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_bulk_decode_logs(
json OBJECT
) returns ARRAY api_integration = aws_bsc_api AS {% if target.name == "prod" %}
'https://u0bch3zf8l.execute-api.us-east-1.amazonaws.com/prod/bulk_decode_logs'
{% else %}
'https://q22a7542fk.execute-api.us-east-1.amazonaws.com/dev/bulk_decode_logs'
{%- endif %};
{% endmacro %}
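
After these integrations are created, the external functions are invoked like any other Snowflake function; the simplest of them takes no arguments at all:

    -- Ask the gateway-backed Lambda for the current chainhead
    SELECT
        streamline.udf_get_chainhead();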
@ -1,25 +0,0 @@

{% test missing_decoded_logs(model) %}
SELECT
l.block_number,
CONCAT(
l.tx_hash,
'-',
l.event_index
) AS log_id
FROM
{{ ref('core__fact_event_logs') }}
l
LEFT JOIN {{ model }}
d
ON l.block_number = d.block_number
AND CONCAT(
l.tx_hash,
'-',
l.event_index
) = d._log_id
WHERE
l.contract_address = LOWER('0xbb4CdB9CBd36B01bD1cBaEBF2De08d9173bc095c') -- WBNB
AND l.topics [0] :: STRING = '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' -- Transfer
AND l.block_timestamp BETWEEN DATEADD('hour', -48, SYSDATE())
AND DATEADD('hour', -6, SYSDATE())
AND d._log_id IS NULL {% endtest %}
@ -1,193 +0,0 @@

{% macro missing_txs(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_full') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}

{% macro recent_missing_txs(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}

{% macro missing_confirmed_txs(
model1,
model2
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
block_hash AS base_block_hash,
tx_hash AS base_tx_hash
FROM
{{ model1 }}
),
model_name AS (
SELECT
block_number AS model_block_number,
block_hash AS model_block_hash,
tx_hash AS model_tx_hash
FROM
{{ model2 }}
)
SELECT
DISTINCT base_block_number AS block_number
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
AND base_block_hash = model_block_hash
WHERE
model_tx_hash IS NULL
AND model_block_number <= (
SELECT
MAX(base_block_number)
FROM
txs_base
)
{% endmacro %}

{% macro recent_missing_traces(
model,
threshold
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
),
FINAL AS (
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
)
SELECT
*
FROM
FINAL
WHERE
(
SELECT
COUNT(*) >= {{ threshold }}
FROM
FINAL
)
{% endmacro %}

{% macro recent_missing_receipts(
model,
threshold
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
),
FINAL AS (
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
)
SELECT
*
FROM
FINAL
WHERE
(
SELECT
COUNT(*) >= {{ threshold }}
FROM
FINAL
)
{% endmacro %}
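
A note on the shared closing pattern: WHERE (SELECT COUNT(*) >= {{ threshold }} FROM FINAL) suppresses every row until the discrepancy count reaches the threshold, so the dbt test only fails once the gap is large enough to matter. A standalone illustration:

    -- Three discrepancies against a threshold of five: the predicate is FALSE,
    -- no rows are returned, and the test passes
    WITH diffs AS (
        SELECT 1 AS id UNION ALL SELECT 2 UNION ALL SELECT 3
    )
    SELECT * FROM diffs
    WHERE (SELECT COUNT(*) >= 5 FROM diffs);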
@ -1,78 +0,0 @@

{% macro if_data_call_function(
func,
target
) %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: Calling udf " ~ func ~ " on " ~ target,
True
) }}
{% endif %}
SELECT
{{ func }}
WHERE
EXISTS(
SELECT
1
FROM
{{ target }}
LIMIT
1
)
{% else %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: NOOP",
False
) }}
{% endif %}
SELECT
NULL
{% endif %}
{% endmacro %}

{% macro if_data_call_wait() %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% set query %}
SELECT
1
WHERE
EXISTS(
SELECT
1
FROM
{{ model.schema ~ "." ~ model.alias }}
LIMIT
1
) {% endset %}
{% if execute %}
{% set results = run_query(
query
) %}
{% if results %}
{{ log(
"Waiting...",
info = True
) }}

{% set wait_query %}
SELECT
system$wait(
{{ var(
"WAIT",
600
) }}
) {% endset %}
{% do run_query(wait_query) %}
{% else %}
SELECT
NULL;
{% endif %}
{% endif %}
{% endif %}
{% endmacro %}
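
For context, if_data_call_function was designed to run as a post-hook on streamline request views: it fires the UDF only when the view produced rows, and compiles to SELECT NULL otherwise. A hedged sketch of the wiring (the function payload and source name are illustrative):

    {{ config(
        materialized = 'view',
        post_hook = if_data_call_function(
            func = "streamline.udf_json_rpc(object_construct('sql_source', 'blocks_realtime'))",
            target = "{{ this.schema }}.{{ this.identifier }}"
        )
    ) }}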
@ -1,6 +1,6 @@

{% docs __overview__ %}

# Welcome to the Flipside Crypto Binance Smart Chain Models Documentation!
# Welcome to the Flipside Crypto BSC Models Documentation!

## **What does this documentation cover?**
The documentation included here details the design of the BSC tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/bsc-models)
@ -16,48 +16,49 @@ There is more information on how to use dbt docs in the last section of this doc

**Click on the links below to jump to the documentation for each schema.**

### Core Tables (bsc.core)
### Core Tables

**Dimension Tables:**
- [dim_labels](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__dim_labels)
- [dim_contracts](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__dim_contracts)
**Dimensional Tables**
- [dim_contracts](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.core__dim_contracts)
- [dim_contract_abis](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.core__dim_contract_abis)
- [dim_labels](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.core__dim_labels)

**Fact Tables:**
- [fact_blocks](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__fact_blocks)
- [fact_event_logs](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__fact_event_logs)
- [fact_decoded_event_logs](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__fact_decoded_event_logs)
- [fact_token_transfers](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__fact_token_transfers)
- [fact_traces](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__fact_traces)
- [fact_transactions](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__fact_transactions)
- [fact_blocks](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.core__fact_blocks)
- [fact_transactions](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.core__fact_transactions)
- [fact_event_logs](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.core__fact_event_logs)
- [fact_traces](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.core__fact_traces)

**Convenience Tables:**
- [ez_decoded_event_logs](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.core__ez_decoded_event_logs)
- [ez_native_transfers](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.core__ez_native_transfers)
- [ez_token_transfers](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.core__ez_token_transfers)

### Price Tables (bsc.price)
- [dim_asset_metadata](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.price__ez_prices_hourly)
-
**Convenience Tables:**
- [ez_native_transfers](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__ez_native_transfers)
- [ez_decoded_event_logs](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__ez_decoded_event_logs)
- [ez_token_transfers](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__ez_token_transfers)
- [dim_asset_metadata](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.price__ez_prices_hourly)

### DeFi Tables (bsc.defi)
- [ez_bridge_activity](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.defi__ez_bridge_activity)
- [ez_dex_swaps](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.defi__ez_dex_swaps)
- [dim_dex_liquidity_pools](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.defi__dim_dex_liquidity_pools)
- [ez_dex_swaps](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.core__ez_dex_swaps)
- [ez_lending_borrows](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.defi__ez_lending_borrows)
- [ez_lending_deposits](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.defi__ez_lending_deposits)
- [ez_lending_flashloans](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.defi__ez_lending_flashloans)
- [ez_lending_liquidations](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.defi__ez_lending_liquidations)
- [ez_lending_repayments](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.defi__ez_lending_repayments)
- [ez_lending_withdraws](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.defi__ez_lending_withdraws)
- [ez_bridge_activity](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.defi__ez_bridge_activity)

### Flipside Partner Tables (bsc.partner_name)

### NFT Tables (bsc.nft)
- [ez_nft_transfers](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.nft__ez_nft_transfers)
- [ez_nft_transfers](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.nft__ez_nft_transfers)
- [ez_nft_sales](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.nft__ez_nft_sales)

### Stats Tables (bsc.stats)
- [ez_core_metrics_hourly](https://flipsidecrypto.github.io/bsc-models/#!/model/model.bsc_models.stats__ez_core_metrics_hourly)
- [ez_core_metrics_hourly](https://flipsidecrypto.github.io/bsc-models/#!/model/model.fsc_evm.stats__ez_core_metrics_hourly)

## **Helpful User-Defined Functions (UDFs)**

@ -77,6 +78,8 @@ The dimension tables are sourced from a variety of on-chain and off-chain source

Convenience views (denoted ez_) are a combination of different fact and dimension tables. These views are built to make it easier to query the data.

NOTE: BSC is currently operating in its Testnet phase. Flipside will provide Mainnet data tables once BSC Mainnet is deployed.

## **Using dbt docs**
### Navigation

@ -84,7 +87,7 @@ You can use the ```Project``` and ```Database``` navigation tabs on the left sid

### Database Tab

This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the database.
This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the databsc.

### Graph Exploration

@ -98,10 +101,11 @@ Note that you can also right-click on models to interactively filter and explore


### **More information**
- [Flipside](https://flipsidecrypto.xyz)
- [Flipside](https://flipsidecrypto.xyz/)
- [Velocity](https://app.flipsidecrypto.com/velocity?nav=Discover)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/bsc-models)
- [Query Editor Shortcuts](https://docs.flipsidecrypto.com/velocity/query-editor-shortcuts)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)


{% enddocs %}
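
To give a feel for the convenience layer described above, a sample query against one of the ez_ tables (column names follow the standard EVM ez_native_transfers shape; verify against the model docs linked above):

    -- Daily native BNB transfer volume over the last week
    SELECT
        DATE_TRUNC('day', block_timestamp) AS day_,
        SUM(amount) AS bnb_transferred
    FROM
        bsc.core.ez_native_transfers
    WHERE
        block_timestamp >= DATEADD('day', -7, CURRENT_DATE)
    GROUP BY 1
    ORDER BY 1;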
@ -1,87 +0,0 @@

{{ config (
materialized = "ephemeral"
) }}

WITH retry AS (

SELECT
contract_address,
GREATEST(
latest_call_block,
latest_event_block
) AS block_number,
total_interaction_count
FROM
{{ ref("silver__relevant_contracts") }}
r
LEFT JOIN {{ source(
'bsc_silver',
'verified_abis'
) }}
v USING (contract_address)
WHERE
r.total_interaction_count >= 250 -- high interaction count
AND GREATEST(
max_inserted_timestamp_logs,
max_inserted_timestamp_traces
) >= CURRENT_DATE - INTERVAL '30 days' -- recent activity
AND v.contract_address IS NULL -- no verified abi
AND r.contract_address NOT IN (
SELECT
contract_address
FROM
{{ source(
'bsc_bronze_api',
'contract_abis'
) }}
WHERE
_inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
AND abi_data :data :result :: STRING <> 'Max rate limit reached'
)
ORDER BY
total_interaction_count DESC
LIMIT
25
), FINAL AS (
SELECT
proxy_address AS contract_address,
start_block AS block_number
FROM
{{ ref("silver__proxies") }}
p
JOIN retry r USING (contract_address)
LEFT JOIN {{ source(
'bsc_silver',
'verified_abis'
) }}
v
ON v.contract_address = p.proxy_address
WHERE
v.contract_address IS NULL
AND p.contract_address NOT IN (
SELECT
contract_address
FROM
{{ source(
'bsc_bronze_api',
'contract_abis'
) }}
WHERE
_inserted_timestamp >= CURRENT_DATE - INTERVAL '30 days' -- this won't let us retry the same contract within 30 days
AND abi_data :data :result :: STRING <> 'Max rate limit reached'
)
UNION ALL
SELECT
contract_address,
block_number
FROM
retry
)
SELECT
*
FROM
FINAL qualify ROW_NUMBER() over (
PARTITION BY contract_address
ORDER BY
block_number DESC
) = 1
@ -1,68 +0,0 @@

{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false,
tags = ['curated']
) }}

WITH base AS (

SELECT
contract_address
FROM
{{ ref('silver__relevant_contracts') }}
WHERE
total_interaction_count >= 1000

{% if is_incremental() %}
and contract_address not in (
SELECT
contract_address
FROM
{{ this }}
WHERE
abi_data :data :result :: STRING <> 'Max rate limit reached'
)
{% endif %}
order by total_interaction_count desc
LIMIT
400
), all_contracts AS (
SELECT
contract_address
FROM
base
UNION
SELECT
contract_address
FROM
{{ ref('_retry_abis') }}
),
row_nos AS (
SELECT
contract_address,
ROW_NUMBER() over (
ORDER BY
contract_address
) AS row_no
FROM
all_contracts
),
batched AS ({% for item in range(501) %}
SELECT
rn.contract_address, live.udf_api('GET', CONCAT('https://api.bscscan.com/api?module=contract&action=getabi&address=', rn.contract_address, '&apikey={key}'),{},{}, 'Vault/prod/block_explorers/bsc_scan') AS abi_data, SYSDATE() AS _inserted_timestamp
FROM
row_nos rn
WHERE
row_no = {{ item }}

{% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %})
SELECT
contract_address,
abi_data,
_inserted_timestamp
FROM
batched
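
The batched CTE above is a Jinja unrolling trick: the loop expands into 501 single-row SELECTs glued together with UNION ALL, which pins each live.udf_api call to exactly one contract rather than letting Snowflake batch the calls. The pattern in miniature:

    {# Unroll N single-row SELECTs so the UDF executes once per row #}
    {% for item in range(3) %}
    SELECT rn.contract_address
    FROM row_nos rn
    WHERE row_no = {{ item }}
    {% if not loop.last %} UNION ALL {% endif %}
    {% endfor %}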
@ -1,22 +0,0 @@

version: 2
models:
  - name: bronze_api__contract_abis

    columns:
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 1
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
      - name: CONTRACT_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - VARCHAR
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: "^(0x)[0-9a-fA-F]{40}$"
@ -1,128 +0,0 @@

{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false,
tags = ['non_realtime']
) }}

WITH base AS (

SELECT
contract_address,
latest_event_block AS latest_block
FROM
{{ ref('silver__relevant_contracts') }}
WHERE
total_event_count >= 100

{% if is_incremental() %}
AND contract_address NOT IN (
SELECT
contract_address
FROM
{{ this }}
)
{% endif %}
LIMIT
1000
), function_sigs AS (
SELECT
'0x313ce567' AS function_sig,
'decimals' AS function_name
UNION
SELECT
'0x06fdde03',
'name'
UNION
SELECT
'0x95d89b41',
'symbol'
),
all_reads AS (
SELECT
*
FROM
base
JOIN function_sigs
ON 1 = 1
),
ready_reads AS (
SELECT
contract_address,
latest_block,
function_sig,
RPAD(
function_sig,
64,
'0'
) AS input,
utils.udf_json_rpc_call(
'eth_call',
[{'to': contract_address, 'from': null, 'data': input}, utils.udf_int_to_hex(latest_block)],
concat_ws(
'-',
contract_address,
input,
latest_block
)
) AS rpc_request
FROM
all_reads
),
batch_reads AS (
SELECT
ARRAY_AGG(rpc_request) AS batch_rpc_request
FROM
ready_reads
),
node_call AS (
SELECT
*,
live.udf_api(
'POST',
CONCAT(
'{service}',
'/',
'{Authentication}'
),{},
batch_rpc_request,
'Vault/prod/bsc/quicknode/mainnet'
) AS response
FROM
batch_reads
WHERE
EXISTS (
SELECT
1
FROM
ready_reads
LIMIT
1
)
), flat_responses AS (
SELECT
VALUE :id :: STRING AS call_id,
VALUE :result :: STRING AS read_result
FROM
node_call,
LATERAL FLATTEN (
input => response :data
)
)
SELECT
SPLIT_PART(
call_id,
'-',
1
) AS contract_address,
SPLIT_PART(
call_id,
'-',
3
) AS block_number,
LEFT(SPLIT_PART(call_id, '-', 2), 10) AS function_sig,
NULL AS function_input,
read_result,
SYSDATE() :: TIMESTAMP AS _inserted_timestamp
FROM
flat_responses
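
For reference, the three function_sig constants are the standard 4-byte ERC-20/BEP-20 selectors, i.e. the first four bytes of the keccak-256 hash of each signature: decimals() is 0x313ce567, name() is 0x06fdde03, and symbol() is 0x95d89b41. An illustrative look at the calldata and block-height encoding the model builds:

    -- Illustrative values only; mirrors the RPAD / udf_int_to_hex calls in ready_reads
    SELECT
        RPAD('0x313ce567', 64, '0') AS decimals_calldata,
        utils.udf_int_to_hex(40000000) AS block_hex; -- '0x2625a00'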
@ -1,18 +0,0 @@

version: 2
models:
  - name: bronze_api__token_reads
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - CONTRACT_ADDRESS
            - FUNCTION_SIG
    columns:
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 1
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
@ -1,25 +0,0 @@

{{ config(
materialized = 'view'
) }}

SELECT
system_created_at,
insert_date,
blockchain,
address,
creator,
label_type,
label_subtype,
address_name,
project_name,
_is_deleted,
modified_timestamp,
labels_combined_id
FROM
{{ source(
'crosschain_silver',
'labels_combined'
) }}
WHERE
blockchain = 'bsc'
AND address LIKE '0x%'
@ -1,80 +0,0 @@
{{ config (
    materialized = "view",
    tags = ['overflowed_receipts']
) }}

{% for item in range(1, 21) %}

SELECT
    o.file_name,
    f.block_number,
    f.index_vals,
    f.path,
    f.key,
    f.value_
FROM
    (
        SELECT
            file_name,
            file_url,
            index_cols,
            [overflowed_block, overflowed_tx] AS index_vals
        FROM
            (
                SELECT
                    block_number,
                    POSITION,
                    file_name,
                    file_url,
                    index_cols,
                    VALUE [0] AS overflowed_block,
                    VALUE [1] AS overflowed_tx,
                    block_number = overflowed_block
                    AND POSITION = overflowed_tx AS missing
                FROM
                    (
                        SELECT
                            block_number,
                            POSITION,
                            file_name,
                            file_url,
                            index_cols,
                            utils.udf_detect_overflowed_responses(
                                file_url,
                                index_cols
                            ) AS index_vals
                        FROM
                            {{ ref("bronze__potential_overflowed_logs") }}
                        WHERE
                            row_no = {{ item }}
                    ),
                    LATERAL FLATTEN (
                        input => index_vals
                    )
            )
        WHERE
            missing = TRUE
    ) o,
    TABLE(
        utils.udtf_flatten_overflowed_responses(
            o.file_url,
            o.index_cols,
            [o.index_vals]
        )
    ) f
WHERE
    NOT IS_OBJECT(
        f.value_
    )
    AND NOT IS_ARRAY(
        f.value_
    )
    AND NOT IS_NULL_VALUE(
        f.value_
    )

{% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}
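The range(1, 21) loop is a Jinja compile-time construct: dbt renders twenty copies of the query, one per row_no, stitched together with UNION ALL, so each overflowed file is flattened at most once per run. A minimal sketch of the same render-time pattern:

-- Minimal sketch of the loop-and-union pattern used above.
{% for item in range(1, 3) %}
SELECT {{ item }} AS row_no
{% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}
-- Renders to: SELECT 1 AS row_no UNION ALL SELECT 2 AS row_no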
@ -1,80 +0,0 @@
{{ config (
    materialized = "view",
    tags = ['overflowed_receipts']
) }}

{% for item in range(1, 21) %}

SELECT
    o.file_name,
    f.block_number,
    f.index_vals,
    f.path,
    f.key,
    f.value_
FROM
    (
        SELECT
            file_name,
            file_url,
            index_cols,
            [overflowed_block, overflowed_tx] AS index_vals
        FROM
            (
                SELECT
                    block_number,
                    POSITION,
                    file_name,
                    file_url,
                    index_cols,
                    VALUE [0] AS overflowed_block,
                    VALUE [1] AS overflowed_tx,
                    block_number = overflowed_block
                    AND POSITION = overflowed_tx AS missing
                FROM
                    (
                        SELECT
                            block_number,
                            POSITION,
                            file_name,
                            file_url,
                            index_cols,
                            utils.udf_detect_overflowed_responses(
                                file_url,
                                index_cols
                            ) AS index_vals
                        FROM
                            {{ ref("bronze__potential_overflowed_receipts") }}
                        WHERE
                            row_no = {{ item }}
                    ),
                    LATERAL FLATTEN (
                        input => index_vals
                    )
            )
        WHERE
            missing = TRUE
    ) o,
    TABLE(
        utils.udtf_flatten_overflowed_responses(
            o.file_url,
            o.index_cols,
            [o.index_vals]
        )
    ) f
WHERE
    NOT IS_OBJECT(
        f.value_
    )
    AND NOT IS_ARRAY(
        f.value_
    )
    AND NOT IS_NULL_VALUE(
        f.value_
    )

{% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}
@ -1,80 +0,0 @@
{{ config (
    materialized = "view",
    tags = ['overflowed_traces']
) }}

{% for item in range(1, 11) %}

SELECT
    o.file_name,
    f.block_number,
    f.index_vals,
    f.path,
    f.key,
    f.value_
FROM
    (
        SELECT
            file_name,
            file_url,
            index_cols,
            [overflowed_block, overflowed_tx] AS index_vals
        FROM
            (
                SELECT
                    block_number,
                    POSITION,
                    file_name,
                    file_url,
                    index_cols,
                    VALUE [0] AS overflowed_block,
                    VALUE [1] AS overflowed_tx,
                    block_number = overflowed_block
                    AND POSITION = overflowed_tx AS missing
                FROM
                    (
                        SELECT
                            block_number,
                            POSITION,
                            file_name,
                            file_url,
                            index_cols,
                            utils.udf_detect_overflowed_responses(
                                file_url,
                                index_cols
                            ) AS index_vals
                        FROM
                            {{ ref("bronze__potential_overflowed_traces") }}
                        WHERE
                            row_no = {{ item }}
                    ),
                    LATERAL FLATTEN (
                        input => index_vals
                    )
            )
        WHERE
            missing = TRUE
    ) o,
    TABLE(
        utils.udtf_flatten_overflowed_responses(
            o.file_url,
            o.index_cols,
            [o.index_vals]
        )
    ) f
WHERE
    NOT IS_OBJECT(
        f.value_
    )
    AND NOT IS_ARRAY(
        f.value_
    )
    AND NOT IS_NULL_VALUE(
        f.value_
    )

{% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}
@ -1,46 +0,0 @@
{{ config (
    materialized = "view",
    tags = ['overflowed_receipts']
) }}

WITH missing_txs AS (

    SELECT
        r.block_number,
        r.position,
        r.tx_hash,
        cr.file_name
    FROM
        {{ ref("silver__receipts") }} r
        JOIN {{ ref("streamline__receipts_complete") }} cr
        ON r.block_number = cr.block_number
        LEFT JOIN {{ ref("silver__logs") }} l
        ON r.block_number = l.block_number
        AND r.tx_hash = l.tx_hash
    WHERE
        overflowed
        AND l.tx_hash IS NULL
)
SELECT
    block_number,
    POSITION,
    tx_hash,
    file_name,
    build_scoped_file_url(
        @streamline.bronze.BSC_SERVERLESS_PROD,
        file_name
    ) AS file_url,
    ['block_number', 'array_index'] AS index_cols,
    ROW_NUMBER() over (
        ORDER BY
            block_number ASC,
            POSITION ASC
    ) AS row_no
FROM
    missing_txs
ORDER BY
    block_number ASC,
    POSITION ASC
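The row_no emitted here is what the flattener views above key on (row_no = {{ item }} for item 1 through 20), which caps each run at twenty overflowed files. BUILD_SCOPED_FILE_URL is the Snowflake function that turns a stage plus relative file path into a temporary scoped URL; a minimal sketch, with the stage name taken from the model above and the file path purely hypothetical:

SELECT
    BUILD_SCOPED_FILE_URL(
        @streamline.bronze.BSC_SERVERLESS_PROD, -- stage referenced in the model above
        'receipts/000123.json'                  -- hypothetical relative file path
    ) AS file_url;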
@ -1,68 +0,0 @@
{{ config (
    materialized = "view",
    tags = ['overflowed_receipts']
) }}

WITH impacted_blocks AS (

    SELECT
        VALUE :: INT AS block_number
    FROM
        (
            SELECT
                blocks_impacted_array
            FROM
                {{ ref("silver_observability__receipts_completeness") }}
            ORDER BY
                test_timestamp DESC
            LIMIT
                1
        ),
        LATERAL FLATTEN (
            input => blocks_impacted_array
        )
),
all_txs AS (
    SELECT
        t.block_number,
        t.position,
        t.tx_hash
    FROM
        {{ ref("silver__transactions") }} t
        JOIN impacted_blocks USING (block_number)
),
missing_txs AS (
    SELECT
        DISTINCT block_number,
        POSITION,
        file_name
    FROM
        all_txs
        LEFT JOIN {{ ref("silver__receipts") }} tr USING (
            block_number,
            tx_hash
        )
        JOIN {{ ref("streamline__receipts_complete") }} USING (block_number)
    WHERE
        tr.tx_hash IS NULL
)
SELECT
    block_number,
    POSITION,
    file_name,
    build_scoped_file_url(
        @streamline.bronze.BSC_SERVERLESS_PROD,
        file_name
    ) AS file_url,
    ['block_number', 'array_index'] AS index_cols,
    ROW_NUMBER() over (
        ORDER BY
            block_number ASC,
            POSITION ASC
    ) AS row_no
FROM
    missing_txs
ORDER BY
    block_number ASC,
    POSITION ASC
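missing_txs applies the standard LEFT JOIN / IS NULL anti-join: a transaction in an impacted block with no matching receipt row survives the filter. A generic sketch of the pattern, with illustrative table names:

-- Generic anti-join sketch (table names are illustrative):
SELECT
    t.block_number,
    t.tx_hash
FROM
    transactions t
    LEFT JOIN receipts r USING (
        block_number,
        tx_hash
    )
WHERE
    r.tx_hash IS NULL -- keeps only transactions with no matching receipt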
@ -1,80 +0,0 @@
{{ config (
    materialized = "view",
    tags = ['overflowed_traces']
) }}

WITH impacted_blocks AS (

    SELECT
        blocks_impacted_array
    FROM
        {{ ref("silver_observability__traces_completeness") }}
    ORDER BY
        test_timestamp DESC
    LIMIT
        1
),
all_missing AS (
    SELECT
        DISTINCT VALUE :: INT AS block_number
    FROM
        impacted_blocks,
        LATERAL FLATTEN (
            input => blocks_impacted_array
        )
),
all_txs AS (
    SELECT
        block_number,
        POSITION AS tx_position,
        tx_hash
    FROM
        {{ ref("silver__transactions") }}
        JOIN all_missing USING (block_number)
),
missing_txs AS (
    SELECT
        DISTINCT txs.block_number,
        txs.tx_position,
        file_name
    FROM
        all_txs txs
        LEFT JOIN {{ source(
            'bsc_gold',
            'fact_traces'
        ) }} tr2 USING (
            block_number,
            tx_position
        )
        JOIN {{ ref("streamline__traces_complete") }} USING (block_number)
        LEFT JOIN {{ source(
            'bsc_silver',
            'overflowed_traces'
        ) }} ot USING (
            block_number,
            tx_position
        )
    WHERE
        tr2.block_number IS NULL
        AND ot.block_number IS NULL
)
SELECT
    block_number,
    tx_position AS POSITION,
    file_name,
    build_scoped_file_url(
        @streamline.bronze.BSC_SERVERLESS_PROD,
        file_name
    ) AS file_url,
    ['block_number', 'array_index'] AS index_cols,
    ROW_NUMBER() over (
        ORDER BY
            block_number ASC,
            POSITION ASC
    ) AS row_no
FROM
    missing_txs
ORDER BY
    block_number ASC,
    POSITION ASC
@ -1,26 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    asset_id,
    symbol,
    NAME,
    decimals,
    blockchain,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_native_asset_metadata_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_native_asset_metadata'
    ) }}
WHERE
    blockchain = 'bsc'
    AND symbol = 'BNB'
@ -1,29 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    HOUR,
    asset_id,
    symbol,
    NAME,
    decimals,
    price,
    blockchain,
    is_imputed,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_native_prices_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_native_prices'
    ) }}
WHERE
    blockchain = 'bsc'
    AND symbol = 'BNB'
@ -1,28 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    asset_id,
    token_address,
    NAME,
    symbol,
    platform,
    platform_id,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_provider_asset_metadata_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_provider_asset_metadata'
    ) }}
WHERE
    platform IN (
        'BNB',
        'BNB Smart Chain (BEP20)'
    ) -- platforms specific to BSC
@ -1,24 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    asset_id,
    recorded_hour,
    OPEN,
    high,
    low,
    CLOSE,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_provider_prices_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_provider_prices'
    ) }}
-- prices for all ids
@ -1,28 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    token_address,
    asset_id,
    symbol,
    NAME,
    decimals,
    blockchain,
    blockchain_name,
    blockchain_id,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_token_asset_metadata_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_token_asset_metadata'
    ) }}
WHERE
    blockchain = 'bsc'
@ -1,31 +0,0 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    HOUR,
    token_address,
    asset_id,
    symbol,
    NAME,
    decimals,
    price,
    blockchain,
    blockchain_name,
    blockchain_id,
    is_imputed,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_token_prices_id,
    _invocation_id
FROM
    {{ source(
        'crosschain_silver',
        'complete_token_prices'
    ) }}
WHERE
    blockchain = 'bsc'
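A hypothetical consumption query against the hourly token-price view; the relation name and the WBNB token address are illustrative assumptions, not taken from this diff:

-- Illustrative only: relation name and token address are assumptions.
SELECT
    HOUR,
    token_address,
    symbol,
    price
FROM
    silver.complete_token_prices
WHERE
    token_address = LOWER('0xbb4CdB9CBd36B01bD1cBaEBF2De08d9173bc095c') -- WBNB
    AND HOUR >= DATEADD('day', -7, SYSDATE())
ORDER BY
    HOUR;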
@ -1,5 +0,0 @@
{% docs borrow_symbol %}

The symbol of the asset that is repaid or borrowed, depending on the action.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs borrower %}

The address of the user who is borrowing or repaying the loan, depending on the action.

{% enddocs %}
@ -1,7 +0,0 @@
{% docs borrow_action %}

The action that the user is taking.
Borrow: user is borrowing an asset.
Repay: user is repaying an asset that they borrowed in a previous loan.

{% enddocs %}
@ -1,9 +0,0 @@
{% docs borrow_amount %}

The meaning depends on the action:
Borrow: the amount of the asset that the user is borrowing.
Repay: the amount of the asset that the user is repaying.
Deposit_collateral: the amount of collateral that the user is depositing.
Withdraw_collateral: the amount of collateral that the user is withdrawing.

{% enddocs %}
@ -1,8 +0,0 @@
{% docs borrow_amount_usd %}

The meaning depends on the action:
Borrow: the USD amount of the asset that the user is borrowing.
Repay: the USD amount of the asset that the user is repaying.
Deposit_collateral: the USD amount of collateral that the user is depositing.
Withdraw_collateral: the USD amount of collateral that the user is withdrawing.
{% enddocs %}
@ -1,5 +0,0 @@
{% docs borrow_asset %}

The address of the asset (token) that is being borrowed or repaid, depending on the action.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs collateral_address %}

The address of the asset that is used as collateral when borrowing funds.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs collateral_symbol %}

The symbol of the asset that is used as collateral when borrowing funds.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_depositor %}

The address of the user who is depositing funds for lending or withdrawing them, depending on the action.

{% enddocs %}
@ -1,4 +0,0 @@
{% docs lending_asset_address %}

The address of the asset in the token pair. This asset is either deposited or withdrawn for lending purposes.
{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_pool_address %}

The address of the lending pool. For Sushi, this is the address of the Kashi pair.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_symbol %}

The symbol of the asset that is lent or withdrawn, depending on the action.

{% enddocs %}
@ -1,7 +0,0 @@
{% docs lending_action %}

The action that the user is taking.
Deposit: user is depositing funds to be used for lending.
Withdraw: user is no longer willing to lend and withdraws their asset.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_amount %}

The amount of the asset that the user is depositing or withdrawing, depending on the action.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_amount_usd %}

The USD amount of the asset that the user is depositing or withdrawing, depending on the action.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_asset %}

The address of the asset (token) that is being deposited or withdrawn, depending on the action.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_borrower_is_a_contract %}

Yes if the depositor of collateral is a contract; No if it is a regular address.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_lender_is_a_contract %}

Yes if the depositor is a contract; No if it is a regular address.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_lending_pool %}

The name of the lending pool.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_origin_from_address %}

The address of the user who initiates the transaction.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs lending_origin_to_address %}

The address that the initiator of the depositing transaction interacts with. It belongs to the lending platform or routes the transaction there.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs bsc_block_header_json %}

This JSON column contains the block header details.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs bsc_blockchain %}

The blockchain on which transactions are being confirmed.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs bsc_blocks_hash %}

The hash of the block header for a given block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs bsc_blocks_nonce %}

The block nonce is a value used during mining to demonstrate proof of work for a given block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs bsc_blocks_table_doc %}

This table contains block-level data for the Binance Smart Chain. It can be used to analyze trends at the block level, for example gas fees vs. total transactions over time. For more information on EVM blocks, see [Etherscan Resources](https://etherscan.io/directory/Learning_Resources/Ethereum) or [The Ethereum Organization](https://ethereum.org/en/developers/docs/blocks/).

{% enddocs %}
@ -1,5 +0,0 @@
{% docs bsc_difficulty %}

The effort required to mine the block.

{% enddocs %}
@ -1,5 +0,0 @@
{% docs bsc_extra_data %}

Any data included by the validator for a given block.

{% enddocs %}