mirror of https://github.com/FlipsideCrypto/rise-models.git (synced 2026-02-06 11:06:56 +00:00)

initial set up

This commit is contained in:
  parent f77248f6e1
  commit bf01e3486e
.github/workflows/dbt_deploy_new_workflows.yml (vendored, new file, 45 lines)
@@ -0,0 +1,45 @@
name: dbt_deploy_new_workflows
run-name: dbt_deploy_new_workflows

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Deploy New Github Actions
        run: |
          make deploy_new_github_action DBT_TARGET=prod
.github/workflows/dbt_docs_update.yml (vendored, new file, 76 lines)
@@ -0,0 +1,76 @@
name: docs_update

on:
  push:
    branches:
      - "main"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: refresh ddl for datashare
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;

      - name: checkout docs branch
        run: |
          git checkout -B docs origin/main

      - name: generate dbt docs
        run: dbt docs generate -t prod

      - name: move files to docs directory
        run: |
          mkdir -p ./docs
          cp target/{catalog.json,manifest.json,index.html} docs/

      - name: clean up target directory
        run: dbt clean

      - name: check for changes
        run: git status

      - name: stage changed files
        run: git add .

      - name: commit changed files
        run: |
          git config user.email "abc@xyz"
          git config user.name "github-actions"
          git commit -am "Auto-update docs"

      - name: push changes to docs
        run: |
          git push -f --set-upstream origin docs
.github/workflows/dbt_integration_test.yml (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
name: dbt_run_integration_test
run-name: ${{ github.event.inputs.branch }}

on:
  workflow_dispatch:

concurrency: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
    with:
      command: >
        dbt test --selector 'integration_tests'
      environment: ${{ github.ref == 'refs/heads/main' && 'workflow_prod' || 'workflow_dev' }}
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
.github/workflows/dbt_run_adhoc.yml (vendored, new file, 66 lines)
@@ -0,0 +1,66 @@
name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        type: choice
        description: DBT Run Environment
        required: true
        options:
          - dev
          - prod
        default: dev
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
        default: DBT
      dbt_command:
        type: string
        description: 'DBT Run Command'
        required: true

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}
.github/workflows/dbt_run_deployment.yml (vendored, new file, 67 lines)
@@ -0,0 +1,67 @@
name: dbt_run_deployment
run-name: ${{ inputs.dbt_command }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
        default: DBT
      dbt_command:
        type: string
        description: 'DBT Run Command'
        required: true

env:
  DBT_PROFILES_DIR: ./
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}
      - name: Run datashare model
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
          dbt run-operation run_query --args "{sql: call admin.datashare.sp_grant_share_permissions('${{ env.DATABASE }}')}"
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target
.github/workflows/dbt_run_dev_refresh.yml (vendored, new file, 68 lines)
@@ -0,0 +1,68 @@
name: dbt_run_dev_refresh
run-name: dbt_run_dev_refresh

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs_refresh:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run Dev Refresh
        run: |
          dbt run-operation fsc_evm.run_sp_create_prod_clone

  run_dbt_jobs_udfs:
    runs-on: ubuntu-latest
    needs: run_dbt_jobs_refresh
    environment:
      name: workflow_dev

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Recreate UDFs
        run: |
          dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
          dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
.github/workflows/dbt_run_scheduled_main.yml (vendored, new file, 45 lines)
@@ -0,0 +1,45 @@
name: dbt_run_scheduled_main
run-name: dbt_run_scheduled_main

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Main Models
        run: |
          dbt run -m "rise_models,tag:silver_testnet" "rise_models,tag:gold_testnet"
.github/workflows/dbt_run_streamline_chainhead.yml (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
name: dbt_run_streamline_chainhead
run-name: dbt_run_streamline_chainhead

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Chainhead Models
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "rise_models,tag:streamline_testnet_complete" "rise_models,tag:streamline_testnet_realtime"

      - name: Run Chainhead Tests
        run: |
          dbt test -m "rise_models,tag:chainhead"
.github/workflows/dbt_run_streamline_history.yml (vendored, new file, 45 lines)
@@ -0,0 +1,45 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run History Models
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "rise_models,tag:streamline_testnet_complete" "rise_models,tag:streamline_testnet_history"
.github/workflows/dbt_test_daily.yml (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
name: dbt_test_daily
run-name: dbt_test_daily

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Build Daily Testing Views
        run: |
          dbt run -m "fsc_evm,tag:daily_test"

      - name: Run Daily Tests
        run: |
          dbt test -m "fsc_evm,tag:daily_test"
.github/workflows/dbt_test_intraday.yml (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
name: dbt_test_intraday
run-name: dbt_test_intraday

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Build Recent Testing Views
        run: |
          dbt run -m "fsc_evm,tag:recent_test"

      - name: Run Recent Tests
        run: |
          dbt test -m "fsc_evm,tag:recent_test"
.github/workflows/dbt_test_monthly.yml (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
name: dbt_test_monthly
run-name: dbt_test_monthly

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Build Full Testing Views
        run: |
          dbt run -m "fsc_evm,tag:full_test"

      - name: Run Full Tests
        run: |
          dbt test -m "fsc_evm,tag:full_test"
.gitignore (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@

target/
dbt_modules/
# newer versions of dbt use this directory instead of dbt_modules for package dependencies
dbt_packages/
logs/

.venv/
.python-version

# Visual Studio Code files
*/.vscode
*.code-workspace
.history/
**/.DS_Store
.vscode/
.env
dbt-env/

package-lock.yml
README.md (new file, 145 lines)
@@ -0,0 +1,145 @@
## Repo Set Up

1. Create a new repository from the [evm-models-template](https://github.com/FlipsideCrypto/evm-models-template)
2. Update all references to `rise` to the new chain name, in lowercase, using find and replace
3. Update the fsc-evm package version in `packages.yml` to the latest version
4. Set up the rest of the dbt project, where applicable, including but not limited to:
    - `dbt_project.yml` (enable/disable packages, vars, etc.)
    - `.github/workflows` (update tags, etc.)
    - `github_actions__workflows.csv` (update schedule, workflows, etc.)
    - `overview.md` (update `rise`, table references, docs, etc.)
    - `sources.yml` (update schemas, tables, etc.)
    - `requirements.txt` (update dependencies)
    - other files where applicable

## Profile Set Up

#### Use the following within profiles.yml
----

```yml
<chain>: # replace <chain>/<CHAIN> with the chain's profile name; remove this comment from your yml
  target: dev
  outputs:
    dev:
      type: snowflake
      account: <ACCOUNT>
      role: INTERNAL_DEV
      user: <USERNAME>
      authenticator: externalbrowser
      region: us-east-1
      database: <CHAIN>_DEV
      warehouse: DBT
      schema: silver
      threads: 4
      client_session_keep_alive: False
      query_tag: dbt_<USERNAME>_dev

    prod:
      type: snowflake
      account: <ACCOUNT>
      role: DBT_CLOUD_<CHAIN>
      user: <USERNAME>
      authenticator: externalbrowser
      region: us-east-1
      database: <CHAIN>
      warehouse: DBT_CLOUD_<CHAIN>
      schema: silver
      threads: 4
      client_session_keep_alive: False
      query_tag: dbt_<USERNAME>_prod
```

### Common DBT Run Variables

The following variables can be used to control various aspects of the dbt run. Use them with the `--vars` flag when running dbt commands.

| Variable | Description | Example Usage |
|----------|-------------|---------------|
| `UPDATE_UDFS_AND_SPS` | Update User-Defined Functions and Stored Procedures. Defaults to `False`. | `--vars '{"UPDATE_UDFS_AND_SPS":true}'` |
| `STREAMLINE_INVOKE_STREAMS` | Invoke Streamline processes. Defaults to `False`. | `--vars '{"STREAMLINE_INVOKE_STREAMS":true}'` |
| `STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES` | Use the development environment for external tables. Defaults to `False`. | `--vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}'` |
| `HEAL_CURATED_MODEL` | Heal specific curated models. Defaults to an empty array `[]`. See example 3 below. | `--vars '{"HEAL_CURATED_MODEL":["axelar","across","celer_cbridge"]}'` |
| `UPDATE_SNOWFLAKE_TAGS` | Control updating of Snowflake tags. Defaults to `True`. | `--vars '{"UPDATE_SNOWFLAKE_TAGS":false}'` |
| `START_GHA_TASKS` | Start GitHub Actions tasks. Defaults to `False`. | `--vars '{"START_GHA_TASKS":true}'` |

#### Example Commands

1. Update UDFs and SPs:
```
dbt run --vars '{"UPDATE_UDFS_AND_SPS":true}' -m ...
```

2. Invoke Streamline and use dev for external tables:
```
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":true,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -m ...
```

3. Heal specific curated models:
```
dbt run --vars '{"HEAL_CURATED_MODEL":["axelar","across","celer_cbridge"]}' -m ...
```

4. Update Snowflake tags for a specific model:
```
dbt run --vars '{"UPDATE_SNOWFLAKE_TAGS":true}' -s models/silver/utilities/silver__number_sequence.sql
```

5. Start GHA tasks:
```
dbt seed -s github_actions__workflows && dbt run -m models/github_actions --full-refresh && dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}'
```

6. Using two or more variables:
```
dbt run --vars '{"UPDATE_UDFS_AND_SPS":true,"STREAMLINE_INVOKE_STREAMS":true,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -m ...
```

> Note: Replace `-m ...` with appropriate model selections or tags as needed for your project structure.

## FSC_EVM

`fsc_evm` is a collection of macros, models, and other resources that are used to build the Flipside Crypto EVM models.

For more information on the `fsc_evm` package, see the [FSC_EVM Wiki](https://github.com/FlipsideCrypto/fsc-evm/wiki).

## Applying Model Tags

### Database / Schema level tags

Database and schema tags are applied via the `fsc_evm.add_database_or_schema_tags` macro. These tags are inherited by their downstream objects. To add/modify tags, call the appropriate tag set function within the macro.

```
{{ fsc_evm.set_database_tag_value('SOME_DATABASE_TAG_KEY','SOME_DATABASE_TAG_VALUE') }}
{{ fsc_evm.set_schema_tag_value('SOME_SCHEMA_TAG_KEY','SOME_SCHEMA_TAG_VALUE') }}
```

### Model tags

To add/update a model's Snowflake tags, add/modify the `meta` model property under `config`. Only table-level tags are supported at this time via dbt.

{% raw %}
{{ config(
    ...,
    meta={
        'database_tags':{
            'table': {
                'PURPOSE': 'SOME_PURPOSE'
            }
        }
    },
    ...
) }}
{% endraw %}

By default, model tags are pushed to Snowflake on each load. You can disable this by setting the `UPDATE_SNOWFLAKE_TAGS` project variable to `False` during a run.

```
dbt run --vars '{"UPDATE_SNOWFLAKE_TAGS":False}' -s models/silver/utilities/silver__number_sequence.sql
```

### Querying for existing tags on a model in snowflake

```
select *
from table(<chain>.information_schema.tag_references('<chain>.core.fact_blocks', 'table'));
```
analysis/.gitkeep (new file, empty)
data/.gitkeep (new file, empty)
data/github_actions__workflows.csv (new file, 4 lines)
@@ -0,0 +1,4 @@
workflow_name,workflow_schedule
dbt_run_streamline_chainhead,"6,36 * * * *"
dbt_run_dev_refresh,"8 5 * * 1"
dbt_run_scheduled_main,"51 * * * *"
dbt_project.yml (new file, 122 lines)
@@ -0,0 +1,122 @@
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "rise_models" # replace with the name of the chain
version: "1.0.0"
config-version: 2

# This setting configures which "profile" dbt uses for this project.
profile: "rise" # replace with the name of the chain

# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
# found in the "models/" directory. You probably won't need to change these!
model-paths: ["models"]
analysis-paths: ["analysis"]
test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
docs-paths: ["dbt_packages/fsc_evm/doc_descriptions", "models/doc_descriptions", "models"]

target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
  - "target"
  - "dbt_modules"
  - "dbt_packages"

tests:
  rise_models: # replace with the name of the chain
    +store_failures: true # all tests

on-run-start:
  - "{{ fsc_evm.create_sps() }}"
  - "{{ fsc_evm.create_udfs() }}"

on-run-end:
  - '{{ fsc_evm.apply_meta_as_tags(results) }}'

dispatch:
  - macro_namespace: dbt
    search_order:
      - rise_models
      - dbt_snowflake_query_tags
      - dbt

query-comment:
  comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}'
  append: true # Snowflake removes prefixed comments.

# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models

models:
  rise_models: # replace with the name of the chain
    +copy_grants: true
    +persist_docs:
      relation: true
      columns: true
    +on_schema_change: "append_new_columns"
  livequery_models:
    +enabled: true # keep livequery models enabled since you need them
    +materialized: ephemeral
  fsc_evm:
    +enabled: false # keep fsc_evm package disabled
    +copy_grants: true
    +persist_docs:
      relation: true
      columns: true
    +on_schema_change: "append_new_columns"

vars:
  "dbt_date:time_zone": GMT
  STREAMLINE_INVOKE_STREAMS: False
  STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
  UPDATE_UDFS_AND_SPS: False
  UPDATE_SNOWFLAKE_TAGS: True
  OBSERV_FULL_TEST: False
  WAIT: 0
  HEAL_MODEL: False
  HEAL_MODELS: []
  START_GHA_TASKS: False

  #### STREAMLINE 2.0 BEGIN ####

  API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
  EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
  ROLES: |
    ["INTERNAL_DEV"]

  config:
    # The keys correspond to dbt profiles and are case sensitive
    dev:
      API_INTEGRATION: AWS_RISE_API_STG_V2
      EXTERNAL_FUNCTION_URI: 5o9bonenwi.execute-api.us-east-1.amazonaws.com/stg/
      ROLES:
        - AWS_LAMBDA_RISE_API # replace with the name of the chain
        - INTERNAL_DEV

    # prod:
    #   API_INTEGRATION: AWS_RISE_API_PROD_V2
    #   EXTERNAL_FUNCTION_URI: <insert_uri>.execute-api.us-east-1.amazonaws.com/prod/
    #   ROLES:
    #     - AWS_LAMBDA_RISE_API # replace with the name of the chain
    #     - INTERNAL_DEV
    #     - DBT_CLOUD_RISE # replace with the name of the chain

  #### STREAMLINE 2.0 END ####

  #### FSC_EVM BEGIN ####

  ### GLOBAL VARIABLES BEGIN ###

  ## REQUIRED
  GLOBAL_PROD_DB_NAME: "rise"
  GLOBAL_NODE_SECRET_PATH: "insert_vault_path"
  GLOBAL_BLOCKS_PER_HOUR: 3600

  ### GLOBAL VARIABLES END ###

  # Please visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables

  #### FSC_EVM END ####
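One note on the `API_INTEGRATION` / `EXTERNAL_FUNCTION_URI` indirection above, as a hedged sketch of how the lookup behaves given only the `dev` block defined in this file:

{# Illustrative only, not part of the committed file:
   with target.name == 'dev', var("config")["dev"] is found, so API_INTEGRATION
   renders to AWS_RISE_API_STG_V2 and EXTERNAL_FUNCTION_URI to
   5o9bonenwi.execute-api.us-east-1.amazonaws.com/stg/. A target with no
   matching key under config falls back to the "dev" values, which is why the
   prod block is left commented out until its URI exists. #}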
macros/custom_naming_macros.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
{% macro generate_schema_name(custom_schema_name=none, node=none) -%}
    {% set node_name = node.name %}
    {% set split_name = node_name.split('__') %}
    {{ split_name[0] | trim }}
{%- endmacro %}

{% macro generate_alias_name(custom_alias_name=none, node=none) -%}
    {% set node_name = node.name %}
    {% set split_name = node_name.split('__') %}
    {{ split_name[1] | trim }}
{%- endmacro %}
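In plain terms, these overrides split a model's file name on the double underscore to derive its schema and relation name. A hedged sketch of the resulting mapping, using a model name that appears elsewhere in this commit:

{# Illustrative only, not part of the committed file:
   for a model file named silver_testnet__relevant_contracts.sql,
     generate_schema_name -> split_name[0] -> 'silver_testnet'     (the schema)
     generate_alias_name  -> split_name[1] -> 'relevant_contracts' (the relation)
   so the model builds as <database>.silver_testnet.relevant_contracts. #}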
macros/dbt/get_merge_sql.sql (new file, 44 lines)
@@ -0,0 +1,44 @@
{% macro get_merge_sql(
        target,
        source,
        unique_key,
        dest_columns,
        incremental_predicates
    ) -%}
    {% set predicate_override = "" %}
    {% if incremental_predicates [0] == "dynamic_range" %}
        -- run some queries to dynamically determine the min + max of this 'input_column' in the new data
        {% set input_column = incremental_predicates [1] %}
        {% set get_limits_query %}
            SELECT
                MIN(
                    {{ input_column }}
                ) AS lower_limit,
                MAX(
                    {{ input_column }}
                ) AS upper_limit
            FROM
                {{ source }}
        {% endset %}
        {% set limits = run_query(get_limits_query) [0] %}
        {% set lower_limit,
        upper_limit = limits [0],
        limits [1] %}
        -- use those calculated min + max values to limit 'target' scan, to only the days with new data
        {% set predicate_override %}
            dbt_internal_dest.{{ input_column }} BETWEEN '{{ lower_limit }}'
            AND '{{ upper_limit }}' {% endset %}
    {% endif %}

    {% set predicates = [predicate_override] if predicate_override else incremental_predicates %}
    -- standard merge from here
    {% set merge_sql = dbt.get_merge_sql(
        target,
        source,
        unique_key,
        dest_columns,
        predicates
    ) %}
    {{ return(merge_sql) }}
{% endmacro %}
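The branch above only fires when a model passes `dynamic_range` as the first element of `incremental_predicates`; everything else falls through to dbt's standard merge. A minimal sketch of how a model might opt in (a hypothetical config, not one of the files in this commit):

{{ config(
    materialized = 'incremental',
    unique_key = 'block_number',
    -- 'dynamic_range' selects the override branch above; the second element
    -- names the column whose MIN/MAX in the new data bound the target scan
    incremental_predicates = ['dynamic_range', 'block_timestamp']
) }}

With that in place, the merge gains a `dbt_internal_dest.block_timestamp BETWEEN '<min>' AND '<max>'` predicate, so Snowflake can prune target partitions that hold no new data.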
macros/dbt/get_tmp_relation_type.sql (new file, 8 lines)
@@ -0,0 +1,8 @@
{% macro dbt_snowflake_get_tmp_relation_type(
        strategy,
        unique_key,
        language
    ) %}
    -- always table
    {{ return('table') }}
{% endmacro %}
makefile (new file, 44 lines)
@@ -0,0 +1,44 @@
DBT_TARGET ?= dev

deploy_streamline_functions:
	rm -f package-lock.yml && dbt clean && dbt deps
	dbt run -s livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET)
	dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET)

cleanup_time:
	rm -f package-lock.yml && dbt clean && dbt deps

deploy_streamline_tables:
	rm -f package-lock.yml && dbt clean && dbt deps
ifeq ($(findstring dev,$(DBT_TARGET)),dev)
	dbt run -m "fsc_evm,tag:bronze_external" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True}' -t $(DBT_TARGET)
else
	dbt run -m "fsc_evm,tag:bronze_external" -t $(DBT_TARGET)
endif
	dbt run -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" "fsc_evm,tag:utils" --full-refresh -t $(DBT_TARGET)

deploy_streamline_requests:
	rm -f package-lock.yml && dbt clean && dbt deps
	dbt run -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET)

deploy_github_actions:
	dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET)
	dbt seed -s github_actions__workflows -t $(DBT_TARGET)
	dbt run -m models/github_actions --full-refresh -t $(DBT_TARGET)
ifeq ($(findstring dev,$(DBT_TARGET)),dev)
	dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":False}' -t $(DBT_TARGET)
else
	dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}' -t $(DBT_TARGET)
endif

deploy_new_github_action:
	dbt run-operation fsc_evm.drop_github_actions_schema -t $(DBT_TARGET)
	dbt seed -s github_actions__workflows -t $(DBT_TARGET)
	dbt run -m models/github_actions --full-refresh -t $(DBT_TARGET)
ifeq ($(findstring dev,$(DBT_TARGET)),dev)
	dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":False}' -t $(DBT_TARGET)
else
	dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}' -t $(DBT_TARGET)
endif

.PHONY: deploy_streamline_functions deploy_streamline_tables deploy_streamline_requests deploy_github_actions cleanup_time deploy_new_github_action
models/__overview__.md (new file, 72 lines)
@@ -0,0 +1,72 @@
{% docs __overview__ %}

# Welcome to the Flipside Crypto RISE Models Documentation!

## **What does this documentation cover?**
The documentation included here details the design of the Core tables and views available via [Flipside Crypto](https://flipsidecrypto.xyz/). For more information on how these models are built, please see [the GitHub repository](https://github.com/FlipsideCrypto/rise-models).

## **How do I use these docs?**
The easiest way to navigate this documentation is to use the Quick Links below. These links will take you to the documentation for each table, which contains a description, a list of the columns, and other helpful information.

If you are experienced with dbt docs, feel free to use the sidebar to navigate the documentation, as well as explore the relationships between tables and the logic building them.

There is more information on how to use dbt docs in the last section of this document.

## **Quick Links to Table Documentation**

**Click on the links below to jump to the documentation for each schema.**

### Testnet Tables (rise.testnet)

**Fact Tables:**
- [fact_blocks](https://flipsidecrypto.github.io/rise-models/#!/model/model.fsc_evm.core__fact_blocks)
- [fact_event_logs](https://flipsidecrypto.github.io/rise-models/#!/model/model.fsc_evm.core__fact_event_logs)
- [fact_transactions](https://flipsidecrypto.github.io/rise-models/#!/model/model.fsc_evm.core__fact_transactions)
- [fact_traces](https://flipsidecrypto.github.io/rise-models/#!/model/model.fsc_evm.core__fact_traces)

## **Helpful User-Defined Functions (UDFs)**

UDFs are custom functions built by the Flipside team that can be used in your queries to make your life easier.

Please visit [LiveQuery Functions Overview](https://flipsidecrypto.github.io/livequery-models/#!/overview) for a full list of helpful UDFs.
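For a quick, illustrative taste, here is a minimal query using one of the UDFs these models themselves rely on:

    -- utils.udf_hex_to_int converts an RPC hex quantity into a decimal value
    SELECT
        utils.udf_hex_to_int('0x1b4') :: INT AS block_number; -- 436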
## **Data Model Overview**

The Core models are built a few different ways, but the core fact tables are built using three layers of SQL models: **bronze, silver, and gold (or core).**

- Bronze: Data is loaded in from the source as a view
- Silver: All necessary parsing, filtering, de-duping, and other transformations are done here
- Gold (or Core): Final views and tables that are available publicly

The dimension tables are sourced from a variety of on-chain and off-chain sources.

Convenience views (denoted ez_) are a combination of different fact and dimension tables. These views are built to make it easier to query the data.

## **Using dbt docs**
### Navigation

You can use the ```Project``` and ```Database``` navigation tabs on the left side of the window to explore the models in the project.

### Database Tab

This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the database.

### Graph Exploration

You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.

On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the Expand button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build, or are built from, the model you're exploring.

Once expanded, you'll be able to use the ```--models``` and ```--exclude``` model selection syntax to filter the models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).

Note that you can also right-click on models to interactively filter and explore the graph.


### **More information**
- [Flipside](https://flipsidecrypto.xyz/)
- [Data Studio](https://flipsidecrypto.xyz/studio)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/rise-models)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)

{% enddocs %}
@@ -0,0 +1,6 @@
{{ config(
    materialized = 'view',
    tags = ['gha_tasks']
) }}

{{ fsc_utils.gha_task_current_status_view() }}
models/github_actions/github_actions__task_history.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
{{ config(
    materialized = 'view',
    tags = ['gha_tasks']
) }}

{{ fsc_utils.gha_task_history_view() }}
@@ -0,0 +1,6 @@
{{ config(
    materialized = 'view',
    tags = ['gha_tasks']
) }}

{{ fsc_utils.gha_task_performance_view() }}
models/github_actions/github_actions__task_schedule.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
{{ config(
    materialized = 'view',
    tags = ['gha_tasks']
) }}

{{ fsc_utils.gha_task_schedule_view() }}
models/github_actions/github_actions__tasks.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
{{ config(
    materialized = 'view',
    tags = ['gha_tasks']
) }}

{{ fsc_utils.gha_tasks_view() }}
models/sources.yml (new file, 45 lines)
@@ -0,0 +1,45 @@
version: 2

sources:
  - name: github_actions
    database: "{{ target.database }}"
    schema: github_actions
    tables:
      - name: workflows
  - name: bronze_streamline
    database: streamline
    schema: >-
      {{ var('GLOBAL_PROD_DB_NAME') ~ ('_dev' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else '') }}
    tables:
      - name: testnet_blocks
      - name: testnet_transactions
      - name: testnet_receipts
      - name: testnet_traces
      - name: testnet_confirm_blocks
      - name: testnet_decoded_logs
  - name: crosschain_silver
    database: "{{ 'crosschain' if target.database.upper() == var('GLOBAL_PROD_DB_NAME').upper() else 'crosschain_dev' }}"
    schema: silver
    tables:
      - name: labels_combined
      - name: complete_provider_asset_metadata
      - name: complete_native_asset_metadata
      - name: complete_native_prices
      - name: complete_provider_prices
      - name: complete_token_asset_metadata
      - name: complete_token_prices
  - name: bronze_api
    database: "{{ target.database }}"
    schema: bronze_api
    tables:
      - name: contract_abis
  - name: crosschain_public
    database: crosschain
    schema: bronze_public
    tables:
      - name: user_abis
  - name: silver
    database: "{{ target.database }}"
    schema: silver
    tables:
      - name: verified_abis
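A short note on the `bronze_streamline` schema expression above, sketched under the vars this commit sets in `dbt_project.yml`:

{# Illustrative only: with GLOBAL_PROD_DB_NAME set to "rise", the schema renders
   as "rise" by default, or as "rise_dev" when a run passes
   --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES": True}',
   so dev runs read the dev copies of the external tables in the
   streamline database. #}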
@@ -0,0 +1,41 @@
{{ config (
    materialized = 'view',
    tags = ['bronze_core']
) }}

WITH meta AS (

    SELECT
        job_created_time AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
    FROM
        TABLE(
            information_schema.external_table_file_registration_history(
                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
                table_name => '{{ source( "bronze_streamline", "testnet_blocks") }}')
        ) A
)
SELECT
    s.*,
    b.file_name,
    b._inserted_timestamp,
    COALESCE(
        s.value :"BLOCK_NUMBER" :: STRING,
        s.metadata :request :"data" :id :: STRING,
        PARSE_JSON(
            s.metadata :request :"data"
        ) :id :: STRING
    ) :: INT AS block_number
FROM
    {{ source(
        "bronze_streamline",
        "testnet_blocks"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b.partition_key = s.partition_key
WHERE
    b.partition_key = s.partition_key
    AND DATA :error IS NULL
    AND DATA IS NOT NULL
@@ -0,0 +1,42 @@
{{ config (
    materialized = 'view',
    tags = ['bronze_core']
) }}

WITH meta AS (

    SELECT
        registered_on AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
    FROM
        TABLE(
            information_schema.external_table_files(
                table_name => '{{ source( "bronze_streamline", "testnet_blocks") }}'
            )
        ) A
)
SELECT
    s.*,
    b.file_name,
    b._inserted_timestamp,
    COALESCE(
        s.value :"BLOCK_NUMBER" :: STRING,
        s.value :"block_number" :: STRING,
        s.metadata :request :"data" :id :: STRING,
        PARSE_JSON(
            s.metadata :request :"data"
        ) :id :: STRING
    ) :: INT AS block_number
FROM
    {{ source(
        "bronze_streamline",
        "testnet_blocks"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b.partition_key = s.partition_key
WHERE
    b.partition_key = s.partition_key
    AND DATA :error IS NULL
    AND DATA IS NOT NULL
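The two views above form a pattern that repeats for each streamline table in this commit (the extraction dropped the file headers, so the exact view names are not shown): the first reads only files registered in the last three days, for regular incremental loads, while the second lists every file on the external table, for full reloads. A brief annotated summary:

-- Illustrative summary of the repeated pair, not part of the committed files:
--   recent view: information_schema.external_table_file_registration_history(
--                    start_time => DATEADD('day', -3, ...))  -> incremental loads
--   full view:   information_schema.external_table_files(...) -> full reloads
-- Both parse partition_key from the fourth path segment of file_name, join it
-- back to the raw rows, and derive block_number from the payload or request id.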
@@ -0,0 +1,41 @@
{{ config (
    materialized = 'view',
    tags = ['bronze_core']
) }}

WITH meta AS (

    SELECT
        job_created_time AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
    FROM
        TABLE(
            information_schema.external_table_file_registration_history(
                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
                table_name => '{{ source( "bronze_streamline", "testnet_confirm_blocks") }}')
        ) A
)
SELECT
    s.*,
    b.file_name,
    b._inserted_timestamp,
    COALESCE(
        s.value :"BLOCK_NUMBER" :: STRING,
        s.metadata :request :"data" :id :: STRING,
        PARSE_JSON(
            s.metadata :request :"data"
        ) :id :: STRING
    ) :: INT AS block_number
FROM
    {{ source(
        "bronze_streamline",
        "testnet_confirm_blocks"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b.partition_key = s.partition_key
WHERE
    b.partition_key = s.partition_key
    AND DATA :error IS NULL
    AND DATA IS NOT NULL
@@ -0,0 +1,42 @@
{{ config (
    materialized = 'view',
    tags = ['bronze_core']
) }}

WITH meta AS (

    SELECT
        registered_on AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
    FROM
        TABLE(
            information_schema.external_table_files(
                table_name => '{{ source( "bronze_streamline", "testnet_confirm_blocks") }}'
            )
        ) A
)
SELECT
    s.*,
    b.file_name,
    b._inserted_timestamp,
    COALESCE(
        s.value :"BLOCK_NUMBER" :: STRING,
        s.value :"block_number" :: STRING,
        s.metadata :request :"data" :id :: STRING,
        PARSE_JSON(
            s.metadata :request :"data"
        ) :id :: STRING
    ) :: INT AS block_number
FROM
    {{ source(
        "bronze_streamline",
        "testnet_confirm_blocks"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b.partition_key = s.partition_key
WHERE
    b.partition_key = s.partition_key
    AND DATA :error IS NULL
    AND DATA IS NOT NULL
@@ -0,0 +1,41 @@
{{ config (
    materialized = 'view',
    tags = ['bronze_core']
) }}

WITH meta AS (

    SELECT
        job_created_time AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
    FROM
        TABLE(
            information_schema.external_table_file_registration_history(
                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
                table_name => '{{ source( "bronze_streamline", "testnet_receipts") }}')
        ) A
)
SELECT
    s.*,
    b.file_name,
    b._inserted_timestamp,
    COALESCE(
        s.value :"BLOCK_NUMBER" :: STRING,
        s.metadata :request :"data" :id :: STRING,
        PARSE_JSON(
            s.metadata :request :"data"
        ) :id :: STRING
    ) :: INT AS block_number
FROM
    {{ source(
        "bronze_streamline",
        "testnet_receipts"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b.partition_key = s.partition_key
WHERE
    b.partition_key = s.partition_key
    AND DATA :error IS NULL
    AND DATA IS NOT NULL
@@ -0,0 +1,42 @@
{{ config (
    materialized = 'view',
    tags = ['bronze_core']
) }}

WITH meta AS (

    SELECT
        registered_on AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
    FROM
        TABLE(
            information_schema.external_table_files(
                table_name => '{{ source( "bronze_streamline", "testnet_receipts") }}'
            )
        ) A
)
SELECT
    s.*,
    b.file_name,
    b._inserted_timestamp,
    COALESCE(
        s.value :"BLOCK_NUMBER" :: STRING,
        s.value :"block_number" :: STRING,
        s.metadata :request :"data" :id :: STRING,
        PARSE_JSON(
            s.metadata :request :"data"
        ) :id :: STRING
    ) :: INT AS block_number
FROM
    {{ source(
        "bronze_streamline",
        "testnet_receipts"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b.partition_key = s.partition_key
WHERE
    b.partition_key = s.partition_key
    AND DATA :error IS NULL
    AND DATA IS NOT NULL
@@ -0,0 +1,41 @@
{{ config (
    materialized = 'view',
    tags = ['bronze_core']
) }}

WITH meta AS (

    SELECT
        job_created_time AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
    FROM
        TABLE(
            information_schema.external_table_file_registration_history(
                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
                table_name => '{{ source( "bronze_streamline", "testnet_traces") }}')
        ) A
)
SELECT
    s.*,
    b.file_name,
    b._inserted_timestamp,
    COALESCE(
        s.value :"BLOCK_NUMBER" :: STRING,
        s.metadata :request :"data" :id :: STRING,
        PARSE_JSON(
            s.metadata :request :"data"
        ) :id :: STRING
    ) :: INT AS block_number
FROM
    {{ source(
        "bronze_streamline",
        "testnet_traces"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b.partition_key = s.partition_key
WHERE
    b.partition_key = s.partition_key
    AND DATA :error IS NULL
    AND DATA IS NOT NULL
@@ -0,0 +1,42 @@
{{ config (
    materialized = 'view',
    tags = ['bronze_core']
) }}

WITH meta AS (

    SELECT
        registered_on AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
    FROM
        TABLE(
            information_schema.external_table_files(
                table_name => '{{ source( "bronze_streamline", "testnet_traces") }}'
            )
        ) A
)
SELECT
    s.*,
    b.file_name,
    b._inserted_timestamp,
    COALESCE(
        s.value :"BLOCK_NUMBER" :: STRING,
        s.value :"block_number" :: STRING,
        s.metadata :request :"data" :id :: STRING,
        PARSE_JSON(
            s.metadata :request :"data"
        ) :id :: STRING
    ) :: INT AS block_number
FROM
    {{ source(
        "bronze_streamline",
        "testnet_traces"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b.partition_key = s.partition_key
WHERE
    b.partition_key = s.partition_key
    AND DATA :error IS NULL
    AND DATA IS NOT NULL
@@ -0,0 +1,41 @@
{{ config (
    materialized = 'view',
    tags = ['bronze_core']
) }}

WITH meta AS (

    SELECT
        job_created_time AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
    FROM
        TABLE(
            information_schema.external_table_file_registration_history(
                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
                table_name => '{{ source( "bronze_streamline", "testnet_transactions") }}')
        ) A
)
SELECT
    s.*,
    b.file_name,
    b._inserted_timestamp,
    COALESCE(
        s.value :"BLOCK_NUMBER" :: STRING,
        s.metadata :request :"data" :id :: STRING,
        PARSE_JSON(
            s.metadata :request :"data"
        ) :id :: STRING
    ) :: INT AS block_number
FROM
    {{ source(
        "bronze_streamline",
        "testnet_transactions"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b.partition_key = s.partition_key
WHERE
    b.partition_key = s.partition_key
    AND DATA :error IS NULL
    AND DATA IS NOT NULL
@@ -0,0 +1,42 @@
{{ config (
    materialized = 'view',
    tags = ['bronze_core']
) }}

WITH meta AS (

    SELECT
        registered_on AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
    FROM
        TABLE(
            information_schema.external_table_files(
                table_name => '{{ source( "bronze_streamline", "testnet_transactions") }}'
            )
        ) A
)
SELECT
    s.*,
    b.file_name,
    b._inserted_timestamp,
    COALESCE(
        s.value :"BLOCK_NUMBER" :: STRING,
        s.value :"block_number" :: STRING,
        s.metadata :request :"data" :id :: STRING,
        PARSE_JSON(
            s.metadata :request :"data"
        ) :id :: STRING
    ) :: INT AS block_number
FROM
    {{ source(
        "bronze_streamline",
        "testnet_transactions"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b.partition_key = s.partition_key
WHERE
    b.partition_key = s.partition_key
    AND DATA :error IS NULL
    AND DATA IS NOT NULL
@@ -0,0 +1,130 @@
{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%}

{{ config(
    materialized = 'incremental',
    unique_key = "contract_address",
    full_refresh = false,
    tags = ['bronze_testnet', 'recent_test', 'contracts']
) }}

WITH base AS (

    SELECT
        contract_address,
        latest_event_block AS latest_block
    FROM
        {{ ref('silver_testnet__relevant_contracts') }}
    WHERE
        total_event_count >= 25

{% if is_incremental() %}
AND contract_address NOT IN (
    SELECT
        contract_address
    FROM
        {{ this }}
)
{% endif %}
ORDER BY
    total_event_count DESC
LIMIT
    200
), function_sigs AS (
    SELECT
        '0x313ce567' AS function_sig,
        'decimals' AS function_name
    UNION
    SELECT
        '0x06fdde03',
        'name'
    UNION
    SELECT
        '0x95d89b41',
        'symbol'
),
all_reads AS (
    SELECT
        *
    FROM
        base
        JOIN function_sigs
        ON 1 = 1
),
ready_reads AS (
    SELECT
        contract_address,
        latest_block,
        function_sig,
        RPAD(
            function_sig,
            64,
            '0'
        ) AS input,
        utils.udf_json_rpc_call(
            'eth_call',
            [{'to': contract_address, 'from': null, 'data': input}, utils.udf_int_to_hex(latest_block)],
            concat_ws(
                '-',
                contract_address,
                input,
                latest_block
            )
        ) AS rpc_request
    FROM
        all_reads
),
batch_reads AS (
    SELECT
        ARRAY_AGG(rpc_request) AS batch_rpc_request
    FROM
        ready_reads
),
node_call AS (
    SELECT
        *,
        live.udf_api(
            'POST',
            '{{ node_url }}',
            {},
            batch_rpc_request,
            '{{ node_secret_path }}'
        ) AS response
    FROM
        batch_reads
    WHERE
        EXISTS (
            SELECT
                1
            FROM
                ready_reads
            LIMIT
                1
        )
), flat_responses AS (
    SELECT
        VALUE :id :: STRING AS call_id,
        VALUE :result :: STRING AS read_result
    FROM
        node_call,
        LATERAL FLATTEN (
            input => response :data
        )
)
SELECT
    SPLIT_PART(
        call_id,
        '-',
        1
    ) AS contract_address,
    SPLIT_PART(
        call_id,
        '-',
        3
    ) AS block_number,
    LEFT(SPLIT_PART(call_id, '-', 2), 10) AS function_sig,
    NULL AS function_input,
    read_result,
    SYSDATE() :: TIMESTAMP AS _inserted_timestamp
FROM
    flat_responses
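Stepping back, the incremental model above (named `bronze_api_testnet__token_reads` in the schema file that follows) batches its contract reads: it takes up to 200 high-activity contracts, cross-joins the `decimals`/`name`/`symbol` selectors, packs one `eth_call` per pair into a single JSON-RPC array with `utils.udf_json_rpc_call`, posts the batch through `live.udf_api`, and splits the response back apart using the `contract-input-block` id it embedded in each call. A sketch of one element of that batch (the shape is inferred from the model; values are illustrative):

-- Illustrative shape of a single batched element, not actual data:
-- {
--   "method": "eth_call",
--   "params": [
--     { "to": "0x<contract>", "from": null, "data": "0x313ce567<zero-padded>" },
--     "0x<latest_block_hex>"
--   ],
--   "id": "0x<contract>-0x313ce567<zero-padded>-<latest_block>"
-- }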
@@ -0,0 +1,15 @@
version: 2
models:
  - name: bronze_api_testnet__token_reads

    columns:
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
models/testnet/core/gold/testnet__dim_contracts.sql (new file, 46 lines)
@ -0,0 +1,46 @@
{{ config(
materialized = 'incremental',
unique_key = 'address',
merge_exclude_columns = ["inserted_timestamp"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(address, symbol, name), SUBSTRING(address, symbol, name)",
tags = ['gold_testnet', 'contracts']
) }}

SELECT
LOWER(COALESCE(c0.created_contract_address,c1.contract_address)) AS address,
c1.token_symbol AS symbol,
c1.token_name AS NAME,
c1.token_decimals AS decimals,
c0.block_number AS created_block_number,
c0.block_timestamp AS created_block_timestamp,
c0.tx_hash AS created_tx_hash,
c0.creator_address AS creator_address,
c0.created_contracts_id AS dim_contracts_id,
GREATEST(COALESCE(c0.inserted_timestamp, '2000-01-01'), COALESCE(c1.inserted_timestamp, '2000-01-01')) AS inserted_timestamp,
GREATEST(COALESCE(c0.modified_timestamp, '2000-01-01'), COALESCE(c1.modified_timestamp, '2000-01-01')) AS modified_timestamp
FROM
{{ ref('silver_testnet__created_contracts') }}
c0
FULL OUTER JOIN {{ ref('silver_testnet__contracts') }}
c1
ON LOWER(
c0.created_contract_address
) = LOWER(
c1.contract_address
)
{% if is_incremental() %}
WHERE
c0.modified_timestamp > (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp
FROM
{{ this }}
)
OR
c1.modified_timestamp > (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp
FROM
{{ this }}
)
{% endif %}
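The post_hook enables both equality and substring search optimization on the three text columns, which lets Snowflake prune micro-partitions for point lookups and pattern searches. A hedged sketch of the query shapes it is meant to serve (the fully-qualified table name stands in for the rendered `{{ this }}` and the address is a placeholder):

-- Illustrative consumers of the search optimization added above.
SELECT address, name, symbol, decimals
FROM testnet.core.dim_contracts            -- hypothetical location of the model
WHERE address = '0x0000000000000000000000000000000000001010'  -- EQUALITY path
   OR name ILIKE '%token%';                                   -- SUBSTRING path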
28
models/testnet/core/gold/testnet__dim_contracts.yml
Normal file
@ -0,0 +1,28 @@
version: 2
models:
  - name: testnet__dim_contracts
    description: '{{ doc("evm_contracts_table_doc") }}'

    columns:
      - name: ADDRESS
        description: '{{ doc("evm_contracts_contract_address") }}'
      - name: SYMBOL
        description: '{{ doc("evm_contracts_symbol") }}'
      - name: NAME
        description: '{{ doc("evm_contracts_name") }}'
      - name: DECIMALS
        description: '{{ doc("evm_decimals") }}'
      - name: CREATED_BLOCK_NUMBER
        description: '{{ doc("evm_contracts_block_number") }}'
      - name: CREATED_BLOCK_TIMESTAMP
        description: '{{ doc("evm_contracts_block_time") }}'
      - name: CREATED_TX_HASH
        description: '{{ doc("evm_contracts_created_tx_hash") }}'
      - name: CREATOR_ADDRESS
        description: '{{ doc("evm_creator_address") }}'
      - name: DIM_CONTRACTS_ID
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("evm_modified_timestamp") }}'
73
models/testnet/core/gold/testnet__fact_blocks.sql
Normal file
@ -0,0 +1,73 @@
{{ config (
materialized = "incremental",
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['gold_testnet']
) }}

SELECT
block_number,
block_json :hash :: STRING AS block_hash,
utils.udf_hex_to_int(
block_json :timestamp :: STRING
) :: TIMESTAMP AS block_timestamp,
'testnet' AS network,
ARRAY_SIZE(
block_json :transactions
) AS tx_count,
utils.udf_hex_to_int(
block_json :size :: STRING
) :: bigint AS SIZE,
block_json :miner :: STRING AS miner,
block_json :mixHash :: STRING AS mix_hash,
block_json :extraData :: STRING AS extra_data,
block_json :parentHash :: STRING AS parent_hash,
utils.udf_hex_to_int(
block_json :gasUsed :: STRING
) :: bigint AS gas_used,
utils.udf_hex_to_int(
block_json :gasLimit :: STRING
) :: bigint AS gas_limit,
utils.udf_hex_to_int(
block_json :baseFeePerGas :: STRING
) :: bigint AS base_fee_per_gas,
utils.udf_hex_to_int(
block_json :difficulty :: STRING
) :: bigint AS difficulty,
utils.udf_hex_to_int(
block_json :totalDifficulty :: STRING
) :: bigint AS total_difficulty,
block_json :sha3Uncles :: STRING AS sha3_uncles,
block_json :uncles AS uncle_blocks,
utils.udf_hex_to_int(
block_json :nonce :: STRING
) :: bigint AS nonce,
block_json :receiptsRoot :: STRING AS receipts_root,
block_json :stateRoot :: STRING AS state_root,
block_json :transactionsRoot :: STRING AS transactions_root,
block_json :logsBloom :: STRING AS logs_bloom,
utils.udf_hex_to_int(
block_json :blobGasUsed :: STRING
) :: bigint AS blob_gas_used,
utils.udf_hex_to_int(
block_json :excessBlobGas :: STRING
) :: bigint AS excess_blob_gas,
block_json :parentBeaconBlockRoot :: STRING AS parent_beacon_block_root,
block_json :withdrawals AS withdrawals,
block_json :withdrawalsRoot :: STRING AS withdrawals_root,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS fact_blocks_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
{{ ref('silver_testnet__blocks') }}
WHERE 1=1

{% if is_incremental() %}
AND modified_timestamp > (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp
FROM
{{ this }}
)
{% endif %}
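Every numeric block field arrives as a 0x-prefixed hex string and passes through `utils.udf_hex_to_int` before casting. Judging by the `:: bigint` and `:: TIMESTAMP` casts applied downstream, the UDF appears to return a decimal string; a small worked sketch of the two conversions used above, with the return values being assumptions:

-- Assumed hex-to-int behavior and the epoch-seconds cast used for block_timestamp.
SELECT utils.udf_hex_to_int('0x1b4');                     -- '436' (decimal string)
SELECT utils.udf_hex_to_int('0x65f0f3c0') :: TIMESTAMP;   -- 1710289856 -> 2024-03-13 (approx, epoch seconds)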
66
models/testnet/core/gold/testnet__fact_blocks.yml
Normal file
@ -0,0 +1,66 @@
version: 2
models:
  - name: testnet__fact_blocks
    description: '{{ doc("evm_blocks_table_doc") }}'

    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("evm_block_number") }}'
      - name: BLOCK_HASH
        description: '{{ doc("evm_blocks_hash") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("evm_block_timestamp") }}'
      - name: NETWORK
        description: '{{ doc("evm_network") }}'
      - name: TX_COUNT
        description: '{{ doc("evm_tx_count") }}'
      - name: SIZE
        description: '{{ doc("evm_size") }}'
      - name: MINER
        description: '{{ doc("evm_miner") }}'
      - name: BASE_FEE_PER_GAS
        description: '{{ doc("evm_base_fee_per_gas") }}'
      - name: MIX_HASH
        description: '{{ doc("evm_mix_hash") }}'
      - name: EXTRA_DATA
        description: '{{ doc("evm_extra_data") }}'
      - name: PARENT_HASH
        description: '{{ doc("evm_parent_hash") }}'
      - name: GAS_USED
        description: '{{ doc("evm_gas_used") }}'
      - name: GAS_LIMIT
        description: '{{ doc("evm_gas_limit") }}'
      - name: DIFFICULTY
        description: '{{ doc("evm_difficulty") }}'
      - name: TOTAL_DIFFICULTY
        description: '{{ doc("evm_total_difficulty") }}'
      - name: SHA3_UNCLES
        description: '{{ doc("evm_sha3_uncles") }}'
      - name: UNCLE_BLOCKS
        description: '{{ doc("evm_uncle_blocks") }}'
      - name: NONCE
        description: '{{ doc("evm_blocks_nonce") }}'
      - name: RECEIPTS_ROOT
        description: '{{ doc("evm_receipts_root") }}'
      - name: STATE_ROOT
        description: '{{ doc("evm_state_root") }}'
      - name: TRANSACTIONS_ROOT
        description: '{{ doc("evm_transactions_root") }}'
      - name: LOGS_BLOOM
        description: '{{ doc("evm_logs_bloom") }}'
      - name: BLOB_GAS_USED
        description: '{{ doc("evm_blob_gas_used") }}'
      - name: EXCESS_BLOB_GAS
        description: '{{ doc("evm_excess_blob_gas") }}'
      - name: PARENT_BEACON_BLOCK_ROOT
        description: '{{ doc("evm_parent_beacon_block_root") }}'
      - name: WITHDRAWALS
        description: '{{ doc("evm_withdrawals") }}'
      - name: WITHDRAWALS_ROOT
        description: '{{ doc("evm_withdrawals_root") }}'
      - name: FACT_BLOCKS_ID
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("evm_modified_timestamp") }}'
217
models/testnet/core/gold/testnet__fact_event_logs.sql
Normal file
@ -0,0 +1,217 @@
{{ config (
materialized = "incremental",
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['gold_testnet']
) }}

WITH base AS (

SELECT
block_number,
receipts_json :transactionHash :: STRING AS tx_hash,
receipts_json,
receipts_json :logs AS full_logs
FROM
{{ ref('silver_testnet__receipts') }}
WHERE
1 = 1
AND ARRAY_SIZE(receipts_json :logs) > 0

{% if is_incremental() %}
AND modified_timestamp > (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp
FROM
{{ this }})
{% endif %}
),
flattened_logs AS (
SELECT
block_number,
tx_hash,
receipts_json :from :: STRING AS origin_from_address,
receipts_json :to :: STRING AS origin_to_address,
CASE
WHEN receipts_json :status :: STRING = '0x1' THEN TRUE
WHEN receipts_json :status :: STRING = '0x0' THEN FALSE
ELSE NULL
END AS tx_succeeded,
VALUE :address :: STRING AS contract_address,
VALUE :blockHash :: STRING AS block_hash,
VALUE :blockNumber :: STRING AS block_number_hex,
VALUE :data :: STRING AS DATA,
utils.udf_hex_to_int(
VALUE :logIndex :: STRING
) :: INT AS event_index,
VALUE :removed :: BOOLEAN AS event_removed,
VALUE :topics AS topics,
VALUE :transactionHash :: STRING AS transaction_hash,
utils.udf_hex_to_int(
VALUE :transactionIndex :: STRING
) :: INT AS transaction_index
FROM
base,
LATERAL FLATTEN (
input => full_logs
)
),
new_logs AS (
SELECT
l.block_number,
b.block_timestamp,
l.tx_hash,
l.transaction_index AS tx_position,
l.event_index,
l.contract_address,
l.topics,
l.topics [0] :: STRING AS topic_0,
l.topics [1] :: STRING AS topic_1,
l.topics [2] :: STRING AS topic_2,
l.topics [3] :: STRING AS topic_3,
l.data,
l.event_removed,
l.origin_from_address,
l.origin_to_address,
txs.origin_function_signature,
l.tx_succeeded
FROM
flattened_logs l
LEFT JOIN {{ ref('testnet__fact_blocks') }}
b
ON l.block_number = b.block_number

{% if is_incremental() %}
AND b.modified_timestamp >= (
SELECT
MAX(modified_timestamp) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
LEFT JOIN {{ ref('testnet__fact_transactions') }}
txs
ON l.tx_hash = txs.tx_hash
AND l.block_number = txs.block_number

{% if is_incremental() %}
AND txs.modified_timestamp >= (
SELECT
MAX(modified_timestamp) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
)

{% if is_incremental() %},
missing_data AS (
SELECT
t.block_number,
b.block_timestamp AS block_timestamp_heal,
t.tx_hash,
t.tx_position,
t.event_index,
t.contract_address,
t.topics,
t.topic_0,
t.topic_1,
t.topic_2,
t.topic_3,
t.data,
t.event_removed,
t.origin_from_address,
t.origin_to_address,
txs.origin_function_signature AS origin_function_signature_heal,
t.tx_succeeded
FROM
{{ this }}
t
LEFT JOIN {{ ref('testnet__fact_transactions') }}
txs
ON t.tx_hash = txs.tx_hash
AND t.block_number = txs.block_number
LEFT JOIN {{ ref('testnet__fact_blocks') }}
b
ON t.block_number = b.block_number
WHERE
t.block_timestamp IS NULL
OR t.origin_function_signature IS NULL
)
{% endif %},
all_logs AS (
SELECT
block_number,
block_timestamp,
tx_hash,
tx_position,
event_index,
contract_address,
topics,
topic_0,
topic_1,
topic_2,
topic_3,
DATA,
event_removed,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_succeeded
FROM
new_logs

{% if is_incremental() %}
UNION ALL
SELECT
block_number,
block_timestamp_heal AS block_timestamp,
tx_hash,
tx_position,
event_index,
contract_address,
topics,
topic_0,
topic_1,
topic_2,
topic_3,
DATA,
event_removed,
origin_from_address,
origin_to_address,
origin_function_signature_heal AS origin_function_signature,
tx_succeeded
FROM
missing_data
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
tx_position,
event_index,
contract_address,
topics,
topic_0,
topic_1,
topic_2,
topic_3,
DATA,
event_removed,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_succeeded,
{{ dbt_utils.generate_surrogate_key(['tx_hash','event_index']) }} AS fact_event_logs_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
all_logs qualify ROW_NUMBER() over (
PARTITION BY fact_event_logs_id
ORDER BY
block_number DESC,
block_timestamp DESC nulls last,
origin_function_signature DESC nulls last
) = 1
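The `missing_data` branch is a self-heal: rows already in the table whose `block_timestamp` or `origin_function_signature` arrived late are re-joined to their sources and unioned back in, and the final QUALIFY keeps one row per `fact_event_logs_id`, so the repaired copy wins the tie-break. A stripped-down sketch of the same pattern, with hypothetical table and column names:

-- Minimal illustration of the heal-and-dedupe pattern (names are hypothetical).
WITH healed AS (
    SELECT t.id, s.late_attr
    FROM target_table t
    JOIN source_table s USING (id)
    WHERE t.late_attr IS NULL            -- only rows still missing the attribute
),
combined AS (
    SELECT id, late_attr, 2 AS priority FROM healed
    UNION ALL
    SELECT id, late_attr, 1 AS priority FROM target_table
)
SELECT id, late_attr
FROM combined
QUALIFY ROW_NUMBER() OVER (PARTITION BY id ORDER BY priority DESC) = 1  -- repaired copy wins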
46
models/testnet/core/gold/testnet__fact_event_logs.yml
Normal file
@ -0,0 +1,46 @@
version: 2
models:
  - name: testnet__fact_event_logs
    description: '{{ doc("evm_logs_table_doc") }}'

    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("evm_block_number") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("evm_block_timestamp") }}'
      - name: TX_HASH
        description: '{{ doc("evm_tx_hash") }}'
      - name: TX_POSITION
        description: '{{ doc("evm_tx_position") }}'
      - name: EVENT_INDEX
        description: '{{ doc("evm_event_index") }}'
      - name: CONTRACT_ADDRESS
        description: '{{ doc("evm_logs_contract_address") }}'
      - name: TOPICS
        description: '{{ doc("evm_topics") }}'
      - name: TOPIC_0
        description: '{{ doc("evm_topic_0") }}'
      - name: TOPIC_1
        description: '{{ doc("evm_topic_1") }}'
      - name: TOPIC_2
        description: '{{ doc("evm_topic_2") }}'
      - name: TOPIC_3
        description: '{{ doc("evm_topic_3") }}'
      - name: DATA
        description: '{{ doc("evm_logs_data") }}'
      - name: EVENT_REMOVED
        description: '{{ doc("evm_event_removed") }}'
      - name: ORIGIN_FROM_ADDRESS
        description: '{{ doc("evm_from_address") }}'
      - name: ORIGIN_TO_ADDRESS
        description: '{{ doc("evm_to_address") }}'
      - name: ORIGIN_FUNCTION_SIGNATURE
        description: '{{ doc("evm_origin_sig") }}'
      - name: TX_SUCCEEDED
        description: '{{ doc("evm_tx_succeeded") }}'
      - name: FACT_EVENT_LOGS_ID
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("evm_modified_timestamp") }}'
420
models/testnet/core/gold/testnet__fact_traces.sql
Normal file
@ -0,0 +1,420 @@
{{ config (
materialized = "incremental",
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['gold_testnet']
) }}

WITH silver_traces AS (
SELECT
block_number,
tx_position,
trace_address,
parent_trace_address,
trace_address_array,
trace_json,
traces_id,
'regular' AS source
FROM
{{ ref(
'silver_testnet__traces'
) }}
WHERE
1 = 1

{% if is_incremental() %}
AND modified_timestamp > (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp
FROM
{{ this }}
)
{% endif %}

),
sub_traces AS (
SELECT
block_number,
tx_position,
parent_trace_address,
COUNT(*) AS sub_traces
FROM
silver_traces
GROUP BY
block_number,
tx_position,
parent_trace_address
),
trace_index_array AS (
SELECT
block_number,
tx_position,
trace_address,
ARRAY_AGG(flat_value) AS number_array
FROM
(
SELECT
block_number,
tx_position,
trace_address,
IFF(
VALUE :: STRING = 'ORIGIN',
-1,
VALUE :: INT
) AS flat_value
FROM
silver_traces,
LATERAL FLATTEN (
input => trace_address_array
)
)
GROUP BY
block_number,
tx_position,
trace_address
),
trace_index_sub_traces AS (
SELECT
b.block_number,
b.tx_position,
b.trace_address,
IFNULL(
sub_traces,
0
) AS sub_traces,
number_array,
ROW_NUMBER() over (
PARTITION BY b.block_number, b.tx_position
ORDER BY
number_array ASC
) - 1 AS trace_index,
b.trace_json,
b.traces_id,
b.source
FROM
silver_traces b
LEFT JOIN sub_traces s
ON b.block_number = s.block_number
AND b.tx_position = s.tx_position
AND b.trace_address = s.parent_trace_address
JOIN trace_index_array n
ON b.block_number = n.block_number
AND b.tx_position = n.tx_position
AND b.trace_address = n.trace_address
),
errored_traces AS (
SELECT
block_number,
tx_position,
trace_address,
trace_json
FROM
trace_index_sub_traces
WHERE
trace_json :error :: STRING IS NOT NULL
),
error_logic AS (
SELECT
b0.block_number,
b0.tx_position,
b0.trace_address,
b0.trace_json :error :: STRING AS error,
b1.trace_json :error :: STRING AS any_error,
b2.trace_json :error :: STRING AS origin_error
FROM
trace_index_sub_traces b0
LEFT JOIN errored_traces b1
ON b0.block_number = b1.block_number
AND b0.tx_position = b1.tx_position
AND b0.trace_address RLIKE CONCAT('^', b1.trace_address, '(_[0-9]+)*$')
LEFT JOIN errored_traces b2
ON b0.block_number = b2.block_number
AND b0.tx_position = b2.tx_position
AND b2.trace_address = 'ORIGIN'
),
aggregated_errors AS (
SELECT
block_number,
tx_position,
trace_address,
error,
IFF(MAX(any_error) IS NULL
AND error IS NULL
AND origin_error IS NULL, TRUE, FALSE) AS trace_succeeded
FROM
error_logic
GROUP BY
block_number,
tx_position,
trace_address,
error,
origin_error
),
json_traces AS (
SELECT
block_number,
tx_position,
trace_address,
sub_traces,
number_array,
trace_index,
trace_succeeded,
trace_json :error :: STRING AS error_reason,
trace_json :revertReason :: STRING AS revert_reason,
trace_json :from :: STRING AS from_address,
trace_json :to :: STRING AS to_address,
IFNULL(
trace_json :value :: STRING,
'0x0'
) AS value_hex,
IFNULL(
utils.udf_hex_to_int(
trace_json :value :: STRING
),
'0'
) AS value_precise_raw,
utils.udf_decimal_adjust(
value_precise_raw,
18
) AS value_precise,
value_precise :: FLOAT AS VALUE,
utils.udf_hex_to_int(
trace_json :gas :: STRING
) :: INT AS gas,
utils.udf_hex_to_int(
trace_json :gasUsed :: STRING
) :: INT AS gas_used,
trace_json :input :: STRING AS input,
trace_json :output :: STRING AS output,
trace_json :type :: STRING AS TYPE,
traces_id
FROM
trace_index_sub_traces
JOIN aggregated_errors USING (
block_number,
tx_position,
trace_address
)
),
incremental_traces AS (
SELECT
f.block_number,
t.tx_hash,
t.block_timestamp,
t.origin_function_signature,
t.from_address AS origin_from_address,
t.to_address AS origin_to_address,
t.tx_position AS tx_position,
f.trace_index,
f.from_address AS from_address,
f.to_address AS to_address,
f.value_hex,
f.value_precise_raw,
f.value_precise,
f.value,
f.gas,
f.gas_used,
f.input,
f.output,
f.type,
f.sub_traces,
f.error_reason,
f.revert_reason,
f.traces_id,
f.trace_succeeded,
f.trace_address,
t.tx_succeeded
FROM
json_traces f
LEFT OUTER JOIN {{ ref('testnet__fact_transactions') }}
t
ON f.tx_position = t.tx_position
AND f.block_number = t.block_number

{% if is_incremental() %}
AND t.modified_timestamp >= (
SELECT
DATEADD('hour', -24, MAX(modified_timestamp))
FROM
{{ this }})
{% endif %}
)

{% if is_incremental() %},
overflow_blocks AS (
SELECT
DISTINCT block_number
FROM
silver_traces
WHERE
source = 'overflow'
),
heal_missing_data AS (
SELECT
t.block_number,
txs.tx_hash,
txs.block_timestamp AS block_timestamp_heal,
txs.origin_function_signature AS origin_function_signature_heal,
txs.from_address AS origin_from_address_heal,
txs.to_address AS origin_to_address_heal,
t.tx_position,
t.trace_index,
t.from_address,
t.to_address,
t.value_hex,
t.value_precise_raw,
t.value_precise,
t.value,
t.gas,
t.gas_used,
t.input,
t.output,
t.type,
t.sub_traces,
t.error_reason,
t.revert_reason,
t.fact_traces_id AS traces_id,
t.trace_succeeded,
t.trace_address,
txs.tx_succeeded AS tx_succeeded_heal
FROM
{{ this }}
t
JOIN {{ ref('testnet__fact_transactions') }}
txs
ON t.tx_position = txs.tx_position
AND t.block_number = txs.block_number
WHERE
t.tx_hash IS NULL
OR t.block_timestamp IS NULL
OR t.tx_succeeded IS NULL
)
{% endif %},
all_traces AS (
SELECT
block_number,
tx_hash,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_position,
trace_index,
from_address,
to_address,
value_hex,
value_precise_raw,
value_precise,
VALUE,
gas,
gas_used,
input,
output,
TYPE,
sub_traces,
error_reason,
revert_reason,
trace_succeeded,
trace_address,
tx_succeeded
FROM
incremental_traces

{% if is_incremental() %}
UNION ALL
SELECT
block_number,
tx_hash,
block_timestamp_heal AS block_timestamp,
origin_function_signature_heal AS origin_function_signature,
origin_from_address_heal AS origin_from_address,
origin_to_address_heal AS origin_to_address,
tx_position,
trace_index,
from_address,
to_address,
value_hex,
value_precise_raw,
value_precise,
VALUE,
gas,
gas_used,
input,
output,
TYPE,
sub_traces,
error_reason,
revert_reason,
trace_succeeded,
trace_address,
tx_succeeded_heal AS tx_succeeded
FROM
heal_missing_data
UNION ALL
SELECT
block_number,
tx_hash,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_position,
trace_index,
from_address,
to_address,
value_hex,
value_precise_raw,
value_precise,
VALUE,
gas,
gas_used,
input,
output,
TYPE,
sub_traces,
error_reason,
revert_reason,
trace_succeeded,
trace_address,
tx_succeeded
FROM
{{ this }}
JOIN overflow_blocks USING (block_number)
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
tx_position,
trace_index,
from_address,
to_address,
input,
output,
TYPE,
trace_address,
sub_traces,
VALUE,
value_precise_raw,
value_precise,
value_hex,
gas,
gas_used,
origin_from_address,
origin_to_address,
origin_function_signature,
trace_succeeded,
error_reason,
revert_reason,
tx_succeeded,
{{ dbt_utils.generate_surrogate_key(
['tx_hash', 'trace_index']
) }} AS fact_traces_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
all_traces qualify(ROW_NUMBER() over(PARTITION BY block_number, tx_position, trace_index
ORDER BY
modified_timestamp DESC, block_timestamp DESC nulls last)) = 1
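`trace_address` encodes the call tree: 'ORIGIN' is the root, and children are underscore-joined indexes ('0', '0_1', '0_1_2', ...). The anchored RLIKE in `error_logic` therefore matches an errored trace plus all of its descendants, which is how an ancestor's failure propagates into `trace_succeeded`. Two worked matches showing why the anchoring matters:

-- Behavior of the descendant pattern '^<parent>(_[0-9]+)*$':
SELECT '0_1_2' RLIKE '^0(_[0-9]+)*$';  -- TRUE:  trace 0_1_2 sits under trace 0
SELECT '10' RLIKE '^1(_[0-9]+)*$';     -- FALSE: sibling 10 does not match under 1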
62
models/testnet/core/gold/testnet__fact_traces.yml
Normal file
@ -0,0 +1,62 @@
version: 2
models:
  - name: testnet__fact_traces
    description: '{{ doc("evm_traces_table_doc") }}'

    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("evm_block_number") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("evm_block_timestamp") }}'
      - name: TX_HASH
        description: '{{ doc("evm_tx_hash") }}'
      - name: TX_POSITION
        description: '{{ doc("evm_tx_position") }}'
      - name: TRACE_INDEX
        description: '{{ doc("evm_trace_index") }}'
      - name: FROM_ADDRESS
        description: '{{ doc("evm_from_address") }}'
      - name: TO_ADDRESS
        description: '{{ doc("evm_to_address") }}'
      - name: INPUT
        description: '{{ doc("evm_traces_input") }}'
      - name: OUTPUT
        description: '{{ doc("evm_traces_output") }}'
      - name: TYPE
        description: '{{ doc("evm_traces_type") }}'
      - name: TRACE_ADDRESS
        description: '{{ doc("evm_trace_address") }}'
      - name: SUB_TRACES
        description: '{{ doc("evm_sub_traces") }}'
      - name: VALUE
        description: '{{ doc("evm_value") }}'
      - name: VALUE_PRECISE_RAW
        description: '{{ doc("evm_precise_amount_unadjusted") }}'
      - name: VALUE_PRECISE
        description: '{{ doc("evm_precise_amount_adjusted") }}'
      - name: VALUE_HEX
        description: '{{ doc("evm_value_hex") }}'
      - name: GAS
        description: '{{ doc("evm_traces_gas") }}'
      - name: GAS_USED
        description: '{{ doc("evm_traces_gas_used") }}'
      - name: ORIGIN_FROM_ADDRESS
        description: '{{ doc("evm_traces_from") }}'
      - name: ORIGIN_TO_ADDRESS
        description: '{{ doc("evm_traces_to") }}'
      - name: ORIGIN_FUNCTION_SIGNATURE
        description: '{{ doc("evm_origin_sig") }}'
      - name: TRACE_SUCCEEDED
        description: '{{ doc("evm_trace_succeeded") }}'
      - name: ERROR_REASON
        description: '{{ doc("evm_trace_error_reason") }}'
      - name: REVERT_REASON
        description: '{{ doc("evm_revert_reason") }}'
      - name: TX_SUCCEEDED
        description: '{{ doc("evm_tx_succeeded") }}'
      - name: FACT_TRACES_ID
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("evm_modified_timestamp") }}'
346
models/testnet/core/gold/testnet__fact_transactions.sql
Normal file
@ -0,0 +1,346 @@
{{ config (
materialized = "incremental",
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['gold_testnet']
) }}

WITH base AS (

SELECT
block_number,
tx_position,
transaction_json
FROM
{{ ref('silver_testnet__transactions') }}

{% if is_incremental() %}
WHERE
modified_timestamp > (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp
FROM
{{ this }})
{% endif %}
),
transactions_fields AS (
SELECT
block_number,
tx_position,
transaction_json :blockHash :: STRING AS block_hash,
transaction_json :blockNumber :: STRING AS block_number_hex,
transaction_json :from :: STRING AS from_address,
utils.udf_hex_to_int(
transaction_json :gas :: STRING
) :: bigint AS gas_limit,
utils.udf_hex_to_int(
transaction_json :gasPrice :: STRING
) :: bigint AS gas_price,
transaction_json :hash :: STRING AS tx_hash,
transaction_json :input :: STRING AS input_data,
LEFT(
input_data,
10
) AS origin_function_signature,
utils.udf_hex_to_int(
transaction_json :nonce :: STRING
) :: bigint AS nonce,
transaction_json :r :: STRING AS r,
transaction_json :s :: STRING AS s,
transaction_json :to :: STRING AS to_address1,
CASE
WHEN to_address1 = '' THEN NULL
ELSE to_address1
END AS to_address,
utils.udf_hex_to_int(
transaction_json :transactionIndex :: STRING
) :: bigint AS transaction_index,
utils.udf_hex_to_int(
transaction_json :type :: STRING
) :: bigint AS tx_type,
utils.udf_hex_to_int(
transaction_json :v :: STRING
) :: bigint AS v,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
transaction_json :maxFeePerGas :: STRING
)
) / pow(
10,
9
) AS max_fee_per_gas,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
transaction_json :maxPriorityFeePerGas :: STRING
)
) / pow(
10,
9
) AS max_priority_fee_per_gas,
utils.udf_hex_to_int(
transaction_json :value :: STRING
) AS value_precise_raw,
utils.udf_decimal_adjust(
value_precise_raw,
18
) AS value_precise,
value_precise :: FLOAT AS VALUE,
utils.udf_hex_to_int(transaction_json :yParity :: STRING):: bigint AS y_parity,
transaction_json :accessList AS access_list
FROM
base
),
new_transactions AS (
SELECT
txs.block_number,
txs.block_hash,
b.block_timestamp,
txs.tx_hash,
txs.from_address,
txs.to_address,
txs.origin_function_signature,
txs.value,
txs.value_precise_raw,
txs.value_precise,
txs.max_fee_per_gas,
txs.max_priority_fee_per_gas,
txs.y_parity,
txs.access_list,
utils.udf_decimal_adjust(
txs.gas_price * utils.udf_hex_to_int(
r.receipts_json :gasUsed :: STRING
) :: bigint,
18
) AS tx_fee_precise,
COALESCE(
tx_fee_precise :: FLOAT,
0
) AS tx_fee,
CASE
WHEN r.receipts_json :status :: STRING = '0x1' THEN TRUE
WHEN r.receipts_json :status :: STRING = '0x0' THEN FALSE
ELSE NULL
END AS tx_succeeded,
txs.tx_type,
txs.nonce,
txs.tx_position,
txs.input_data,
txs.gas_price / pow(
10,
9
) AS gas_price,
utils.udf_hex_to_int(
r.receipts_json :gasUsed :: STRING
) :: bigint AS gas_used,
txs.gas_limit,
utils.udf_hex_to_int(
r.receipts_json :cumulativeGasUsed :: STRING
) :: bigint AS cumulative_gas_used,
utils.udf_hex_to_int(
r.receipts_json :effectiveGasPrice :: STRING
) :: bigint AS effective_gas_price,
txs.r,
txs.s,
txs.v
FROM
transactions_fields txs
LEFT JOIN {{ ref('testnet__fact_blocks') }}
b
ON txs.block_number = b.block_number

{% if is_incremental() %}
AND b.modified_timestamp >= (
SELECT
MAX(modified_timestamp) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
LEFT JOIN {{ ref('silver_testnet__receipts') }}
r
ON txs.block_number = r.block_number
AND txs.tx_hash = r.receipts_json :transactionHash :: STRING

{% if is_incremental() %}
AND r.modified_timestamp >= (
SELECT
MAX(modified_timestamp) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
)

{% if is_incremental() %},
missing_data AS (
SELECT
t.block_number,
b.block_timestamp AS block_timestamp_heal,
t.tx_hash,
t.from_address,
t.to_address,
t.origin_function_signature,
t.value,
t.value_precise_raw,
t.value_precise,
t.max_fee_per_gas,
t.max_priority_fee_per_gas,
t.y_parity,
t.access_list,
utils.udf_decimal_adjust(
t.gas_price * utils.udf_hex_to_int(
r.receipts_json :gasUsed :: STRING
) :: bigint,
9
) AS tx_fee_precise_heal,
COALESCE(
tx_fee_precise_heal :: FLOAT,
0
) AS tx_fee_heal,
CASE
WHEN r.receipts_json :status :: STRING = '0x1' THEN TRUE
WHEN r.receipts_json :status :: STRING = '0x0' THEN FALSE
ELSE NULL
END AS tx_succeeded_heal,
t.tx_type,
t.nonce,
t.tx_position,
t.input_data,
t.gas_price,
utils.udf_hex_to_int(
r.receipts_json :gasUsed :: STRING
) :: bigint AS gas_used_heal,
t.gas_limit,
utils.udf_hex_to_int(
r.receipts_json :cumulativeGasUsed :: STRING
) :: bigint AS cumulative_gas_used_heal,
utils.udf_hex_to_int(
r.receipts_json :effectiveGasPrice :: STRING
) :: bigint AS effective_gas_price_heal,
t.r,
t.s,
t.v
FROM
{{ this }}
t
LEFT JOIN {{ ref('testnet__fact_blocks') }}
b
ON t.block_number = b.block_number
LEFT JOIN {{ ref('silver_testnet__receipts') }}
r
ON t.block_number = r.block_number
AND t.tx_hash = r.receipts_json :transactionHash :: STRING
WHERE
t.block_timestamp IS NULL
OR t.tx_succeeded IS NULL
)
{% endif %},
all_transactions AS (
SELECT
block_number,
block_timestamp,
tx_hash,
from_address,
to_address,
origin_function_signature,
VALUE,
value_precise_raw,
value_precise,
max_fee_per_gas,
max_priority_fee_per_gas,
y_parity,
access_list,
tx_fee,
tx_fee_precise,
tx_succeeded,
tx_type,
nonce,
tx_position,
input_data,
gas_price,
gas_used,
gas_limit,
cumulative_gas_used,
effective_gas_price,
r,
s,
v
FROM
new_transactions

{% if is_incremental() %}
UNION ALL
SELECT
block_number,
block_timestamp_heal AS block_timestamp,
tx_hash,
from_address,
to_address,
origin_function_signature,
VALUE,
value_precise_raw,
value_precise,
max_fee_per_gas,
max_priority_fee_per_gas,
y_parity,
access_list,
tx_fee_heal AS tx_fee,
tx_fee_precise_heal AS tx_fee_precise,
tx_succeeded_heal AS tx_succeeded,
tx_type,
nonce,
tx_position,
input_data,
gas_price,
gas_used_heal AS gas_used,
gas_limit,
cumulative_gas_used_heal AS cumulative_gas_used,
effective_gas_price_heal AS effective_gas_price,
r,
s,
v
FROM
missing_data
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
from_address,
to_address,
origin_function_signature,
VALUE,
value_precise_raw,
value_precise,
tx_fee,
tx_fee_precise,
tx_succeeded,
tx_type,
nonce,
tx_position,
input_data,
gas_price,
gas_used,
gas_limit,
cumulative_gas_used,
effective_gas_price,
max_fee_per_gas,
max_priority_fee_per_gas,
y_parity,
access_list,
r,
s,
v,
{{ dbt_utils.generate_surrogate_key(['tx_hash']) }} AS fact_transactions_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
all_transactions qualify ROW_NUMBER() over (
PARTITION BY fact_transactions_id
ORDER BY
block_number DESC,
block_timestamp DESC nulls last,
tx_succeeded DESC nulls last
) = 1
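One unit subtlety worth noting: in `new_transactions` the fee is `gas_price` (wei) times `gas_used`, adjusted by 18 decimals into ETH, while in `missing_data` the stored `gas_price` read back from the table has already been divided by 10^9 into gwei, so the heal path adjusts by only 9. Both land on the same number. A worked example under the assumption that `utils.udf_decimal_adjust(x, n)` divides by 10^n:

-- 20 gwei gas price, 21,000 gas used (assumed UDF semantics: divide by 10^n).
SELECT utils.udf_decimal_adjust(20000000000 * 21000, 18);  -- 0.00042 ETH, wei-based path
SELECT utils.udf_decimal_adjust(20 * 21000, 9);            -- 0.00042 ETH, gwei-based heal path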
72
models/testnet/core/gold/testnet__fact_transactions.yml
Normal file
@ -0,0 +1,72 @@
version: 2
models:
  - name: testnet__fact_transactions
    description: '{{ doc("evm_tx_table_doc") }}'

    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("evm_block_number") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("evm_block_timestamp") }}'
      - name: TX_HASH
        description: '{{ doc("evm_tx_hash") }}'
      - name: FROM_ADDRESS
        description: '{{ doc("evm_from_address") }}'
      - name: TO_ADDRESS
        description: '{{ doc("evm_to_address") }}'
      - name: ORIGIN_FUNCTION_SIGNATURE
        description: '{{ doc("evm_tx_origin_sig") }}'
      - name: VALUE
        description: '{{ doc("evm_value") }}'
      - name: VALUE_PRECISE_RAW
        description: '{{ doc("evm_precise_amount_unadjusted") }}'
      - name: VALUE_PRECISE
        description: '{{ doc("evm_precise_amount_adjusted") }}'
      - name: TX_FEE
        description: '{{ doc("evm_tx_fee") }}'
      - name: TX_FEE_PRECISE
        description: '{{ doc("evm_tx_fee_precise") }}'
      - name: TX_SUCCEEDED
        description: '{{ doc("evm_tx_succeeded") }}'
      - name: TX_TYPE
        description: '{{ doc("evm_tx_type") }}'
      - name: NONCE
        description: '{{ doc("evm_tx_nonce") }}'
      - name: TX_POSITION
        description: '{{ doc("evm_tx_position") }}'
      - name: INPUT_DATA
        description: '{{ doc("evm_tx_input_data") }}'
      - name: GAS_PRICE
        description: '{{ doc("evm_tx_gas_price") }}'
      - name: GAS_USED
        description: '{{ doc("evm_tx_gas_used") }}'
      - name: GAS_LIMIT
        description: '{{ doc("evm_tx_gas_limit") }}'
      - name: CUMULATIVE_GAS_USED
        description: '{{ doc("evm_cumulative_gas_used") }}'
      - name: EFFECTIVE_GAS_PRICE
        description: '{{ doc("evm_effective_gas_price") }}'
      - name: R
        description: '{{ doc("evm_r") }}'
      - name: S
        description: '{{ doc("evm_s") }}'
      - name: V
        description: '{{ doc("evm_v") }}'
      - name: MAX_FEE_PER_GAS
        description: '{{ doc("evm_max_fee_per_gas") }}'
      - name: MAX_PRIORITY_FEE_PER_GAS
        description: '{{ doc("evm_max_priority_fee_per_gas") }}'
      - name: L1_FEE
        description: '{{ doc("evm_l1_fee") }}'
      - name: L1_FEE_PRECISE_RAW
        description: '{{ doc("evm_l1_fee_precise_raw") }}'
      - name: Y_PARITY
        description: '{{ doc("evm_y_parity") }}'
      - name: ACCESS_LIST
        description: '{{ doc("evm_access_list") }}'
      - name: FACT_TRANSACTIONS_ID
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("evm_modified_timestamp") }}'
@ -0,0 +1,9 @@
{{ config (
materialized = "view",
tags = ['full_test']
) }}

SELECT
*
FROM
{{ ref('testnet__fact_blocks') }}
@ -0,0 +1,138 @@
version: 2
models:
  - name: test_gold_testnet__fact_blocks_full
    description: "This is a view used to test all of the gold fact blocks model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - BLOCK_NUMBER
      - fsc_utils.sequence_gaps:
          column_name: BLOCK_NUMBER
          where: BLOCK_TIMESTAMP < CURRENT_DATE - 1

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: NETWORK
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: ^[a-zA-Z0-9_]+$
      - name: TX_COUNT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: SIZE
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: MINER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EXTRA_DATA
        tests:
          - not_null
      - name: PARENT_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: GAS_USED
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: GAS_LIMIT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: SHA3_UNCLES
        tests:
          - not_null
      - name: UNCLE_BLOCKS
        tests:
          - not_null
      - name: NONCE
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: RECEIPTS_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: STATE_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TRANSACTIONS_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: LOGS_BLOOM
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: FACT_BLOCKS_ID
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_unique
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
@ -0,0 +1,16 @@
{{ config (
materialized = "view",
tags = ['recent_test']
) }}

SELECT
*
FROM
{{ ref('testnet__fact_blocks') }}
WHERE
block_number > (
SELECT
block_number
FROM
{{ ref('_testnet_block_lookback') }}
)
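Each `recent_test` view filters to blocks above the `_testnet_block_lookback` watermark so that the tagged test runs only scan a recent slice. That model is not part of this diff; given the three-day window the test descriptions mention, it plausibly reduces to something like the sketch below (the DATEADD window is an assumption):

-- Hypothetical shape of _testnet_block_lookback (not part of this commit):
-- the highest block at least three days old, so "recent" views cover ~3 days.
SELECT MAX(block_number) AS block_number
FROM {{ ref('testnet__fact_blocks') }}
WHERE block_timestamp <= DATEADD('day', -3, SYSDATE())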
@ -0,0 +1,147 @@
version: 2
models:
  - name: test_gold_testnet__fact_blocks_recent
    description: "This is a view used to test the last three days of fact blocks."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - BLOCK_NUMBER
      - fsc_utils.sequence_gaps:
          column_name: BLOCK_NUMBER
          config:
            severity: error
            error_if: ">10"

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: NETWORK
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: ^[a-zA-Z0-9_]+$
      - name: TX_COUNT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: SIZE
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: MINER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EXTRA_DATA
        tests:
          - not_null
      - name: PARENT_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: GAS_USED
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: GAS_LIMIT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: DIFFICULTY
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: SHA3_UNCLES
        tests:
          - not_null
      - name: UNCLE_BLOCKS
        tests:
          - not_null
      - name: NONCE
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: RECEIPTS_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: STATE_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TRANSACTIONS_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: LOGS_BLOOM
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: FACT_BLOCKS_ID
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_unique
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
@ -0,0 +1,9 @@
{{ config (
materialized = "view",
tags = ['full_test']
) }}

SELECT
*
FROM
{{ ref('testnet__fact_event_logs') }}
@ -0,0 +1,102 @@
version: 2
models:
  - name: test_gold_testnet__fact_event_logs_full
    description: "This is a view used to test all of the gold fact event logs model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
            - EVENT_INDEX
      - fsc_utils.sequence_gaps:
          partition_by:
            - BLOCK_NUMBER
            - TX_HASH
          column_name: EVENT_INDEX
          where: BLOCK_TIMESTAMP < CURRENT_DATE - 1
      - fsc_evm.events_match_txs:
          transactions_model: ref('test_gold_testnet__fact_transactions_full')

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
          - fsc_utils.tx_block_count:
              config:
                severity: error
                error_if: "!=0"
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: EVENT_INDEX
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: CONTRACT_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOPICS
        tests:
          - not_null
      - name: DATA
        tests:
          - not_null
      - name: EVENT_REMOVED
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: FACT_EVENT_LOGS_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
@ -0,0 +1,16 @@
{{ config (
materialized = "view",
tags = ['recent_test']
) }}

SELECT
*
FROM
{{ ref('testnet__fact_event_logs') }}
WHERE
block_number > (
SELECT
block_number
FROM
{{ ref('_testnet_block_lookback') }}
)
||||
@ -0,0 +1,101 @@
|
||||
version: 2
|
||||
models:
|
||||
- name: test_gold_testnet__fact_event_logs_recent
|
||||
description: "This is a view used to test the last three days of fact event logs."
|
||||
tests:
|
||||
- dbt_utils.unique_combination_of_columns:
|
||||
combination_of_columns:
|
||||
- TX_HASH
|
||||
- EVENT_INDEX
|
||||
- fsc_utils.sequence_gaps:
|
||||
partition_by:
|
||||
- BLOCK_NUMBER
|
||||
- TX_HASH
|
||||
column_name: EVENT_INDEX
|
||||
- fsc_evm.events_match_txs:
|
||||
transactions_model: ref('test_gold_testnet__fact_transactions_recent')
|
||||
|
||||
columns:
|
||||
- name: BLOCK_NUMBER
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_column_values_to_be_in_type_list:
|
||||
column_type_list:
|
||||
- NUMBER
|
||||
- name: BLOCK_TIMESTAMP
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_row_values_to_have_recent_data:
|
||||
datepart: hour
|
||||
interval: 2
|
||||
- dbt_expectations.expect_column_values_to_be_in_type_list:
|
||||
column_type_list:
|
||||
- TIMESTAMP_LTZ
|
||||
- TIMESTAMP_NTZ
|
||||
- name: TX_HASH
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_column_values_to_match_regex:
|
||||
regex: 0[xX][0-9a-fA-F]+
|
||||
- fsc_utils.tx_block_count:
|
||||
config:
|
||||
severity: error
|
||||
error_if: "!=0"
|
||||
- name: TX_POSITION
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_column_values_to_be_in_type_list:
|
||||
column_type_list:
|
||||
- NUMBER
|
||||
- FLOAT
|
||||
- name: EVENT_INDEX
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_column_values_to_be_in_type_list:
|
||||
column_type_list:
|
||||
- NUMBER
|
||||
- FLOAT
|
||||
- name: CONTRACT_ADDRESS
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_column_values_to_match_regex:
|
||||
regex: 0[xX][0-9a-fA-F]+
|
||||
- name: TOPICS
|
||||
tests:
|
||||
- not_null
|
||||
- name: DATA
|
||||
tests:
|
||||
- not_null
|
||||
- name: EVENT_REMOVED
|
||||
tests:
|
||||
- not_null
|
||||
- name: ORIGIN_FROM_ADDRESS
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_column_values_to_match_regex:
|
||||
regex: 0[xX][0-9a-fA-F]+
|
||||
- name: ORIGIN_TO_ADDRESS
|
||||
tests:
|
||||
- dbt_expectations.expect_column_values_to_match_regex:
|
||||
regex: 0[xX][0-9a-fA-F]+
|
||||
- name: ORIGIN_FUNCTION_SIGNATURE
|
||||
tests:
|
||||
- not_null
|
||||
- name: TX_SUCCEEDED
|
||||
tests:
|
||||
- not_null
|
||||
- name: FACT_EVENT_LOGS_ID
|
||||
tests:
|
||||
- not_null
|
||||
- name: INSERTED_TIMESTAMP
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_row_values_to_have_recent_data:
|
||||
datepart: hour
|
||||
interval: 2
|
||||
- name: MODIFIED_TIMESTAMP
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_row_values_to_have_recent_data:
|
||||
datepart: hour
|
||||
interval: 2
|
||||
@ -0,0 +1,9 @@
{{ config (
materialized = "view",
tags = ['full_test']
) }}

SELECT
*
FROM
{{ ref('testnet__fact_traces') }}
@ -0,0 +1,122 @@
version: 2
models:
  - name: test_gold_testnet__fact_traces_full
    description: "This is a view used to test all of the gold fact traces model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
            - TRACE_INDEX
      - fsc_utils.sequence_gaps:
          partition_by:
            - TX_HASH
          column_name: TRACE_INDEX
          where: BLOCK_TIMESTAMP < CURRENT_DATE - 1 AND TX_HASH IS NOT NULL
      - fsc_evm.txs_have_traces:
          transactions_model: ref('test_gold_testnet__fact_transactions_full')

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: TRACE_INDEX
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: FROM_ADDRESS
        tests:
          - not_null:
              where: TYPE <> 'SELFDESTRUCT'
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
              where: TO_ADDRESS IS NOT NULL
      - name: INPUT
        tests:
          - not_null
      - name: TYPE
        tests:
          - not_null
      - name: TRACE_ADDRESS
        tests:
          - not_null
      - name: SUB_TRACES
        tests:
          - not_null
      - name: VALUE
        tests:
          - not_null
      - name: VALUE_PRECISE_RAW
        tests:
          - not_null
      - name: VALUE_PRECISE
        tests:
          - not_null
      - name: VALUE_HEX
        tests:
          - not_null
      - name: GAS
        tests:
          - not_null
      - name: GAS_USED
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: TRACE_SUCCEEDED
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: FACT_TRACES_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
@ -0,0 +1,16 @@
{{ config (
    materialized = "view",
    tags = ['recent_test']
) }}

SELECT
    *
FROM
    {{ ref('testnet__fact_traces') }}
WHERE
    block_number > (
        SELECT
            block_number
        FROM
            {{ ref('_testnet_block_lookback') }}
    )
@ -0,0 +1,122 @@
version: 2
models:
  - name: test_gold_testnet__fact_traces_recent
    description: "This is a view used to test the last three days of fact traces."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
            - TRACE_INDEX
      - fsc_utils.sequence_gaps:
          partition_by:
            - TX_HASH
          column_name: TRACE_INDEX
          where: TX_HASH IS NOT NULL
      - fsc_evm.txs_have_traces:
          transactions_model: ref('test_gold_testnet__fact_transactions_recent')

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: TRACE_INDEX
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: FROM_ADDRESS
        tests:
          - not_null:
              where: TYPE <> 'SELFDESTRUCT'
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
              where: TO_ADDRESS IS NOT NULL
      - name: INPUT
        tests:
          - not_null
      - name: TYPE
        tests:
          - not_null
      - name: TRACE_ADDRESS
        tests:
          - not_null
      - name: SUB_TRACES
        tests:
          - not_null
      - name: VALUE
        tests:
          - not_null
      - name: VALUE_PRECISE_RAW
        tests:
          - not_null
      - name: VALUE_PRECISE
        tests:
          - not_null
      - name: VALUE_HEX
        tests:
          - not_null
      - name: GAS
        tests:
          - not_null
      - name: GAS_USED
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: TRACE_SUCCEEDED
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: FACT_TRACES_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
@ -0,0 +1,9 @@
{{ config (
    materialized = "view",
    tags = ['full_test']
) }}

SELECT
    *
FROM
    {{ ref('testnet__fact_transactions') }}
@ -0,0 +1,125 @@
version: 2
models:
  - name: test_gold_testnet__fact_transactions_full
    description: "This is a view used to test all of the gold fact transactions model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
      - fsc_utils.sequence_gaps:
          partition_by:
            - BLOCK_NUMBER
          column_name: TX_POSITION
          where: BLOCK_TIMESTAMP < CURRENT_DATE - 1
      - fsc_evm.txs_match_blocks:
          blocks_model: ref('test_gold_testnet__fact_blocks_full')

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
              where: TO_ADDRESS IS NOT NULL
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: VALUE
        tests:
          - not_null
      - name: VALUE_PRECISE_RAW
        tests:
          - not_null
      - name: VALUE_PRECISE
        tests:
          - not_null
      - name: TX_FEE
        tests:
          - not_null
      - name: TX_FEE_PRECISE
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: TX_TYPE
        tests:
          - not_null
      - name: NONCE
        tests:
          - not_null
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: INPUT_DATA
        tests:
          - not_null
      - name: GAS_PRICE
        tests:
          - not_null
      - name: GAS_USED
        tests:
          - not_null
      - name: GAS_LIMIT
        tests:
          - not_null
      - name: CUMULATIVE_GAS_USED
        tests:
          - not_null
      - name: EFFECTIVE_GAS_PRICE
        tests:
          - not_null
      - name: R
        tests:
          - not_null
      - name: S
        tests:
          - not_null
      - name: V
        tests:
          - not_null
      - name: FACT_TRANSACTIONS_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
@ -0,0 +1,16 @@
{{ config (
    materialized = "view",
    tags = ['recent_test']
) }}

SELECT
    *
FROM
    {{ ref('testnet__fact_transactions') }}
WHERE
    block_number > (
        SELECT
            block_number
        FROM
            {{ ref('_testnet_block_lookback') }}
    )
@ -0,0 +1,124 @@
version: 2
models:
  - name: test_gold_testnet__fact_transactions_recent
    description: "This is a view used to test the last three days of fact transactions."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
      - fsc_utils.sequence_gaps:
          partition_by:
            - BLOCK_NUMBER
          column_name: TX_POSITION
      - fsc_evm.txs_match_blocks:
          blocks_model: ref('test_gold_testnet__fact_blocks_recent')

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
              where: TO_ADDRESS IS NOT NULL
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: VALUE
        tests:
          - not_null
      - name: VALUE_PRECISE_RAW
        tests:
          - not_null
      - name: VALUE_PRECISE
        tests:
          - not_null
      - name: TX_FEE
        tests:
          - not_null
      - name: TX_FEE_PRECISE
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: TX_TYPE
        tests:
          - not_null
      - name: NONCE
        tests:
          - not_null
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: INPUT_DATA
        tests:
          - not_null
      - name: GAS_PRICE
        tests:
          - not_null
      - name: GAS_USED
        tests:
          - not_null
      - name: GAS_LIMIT
        tests:
          - not_null
      - name: CUMULATIVE_GAS_USED
        tests:
          - not_null
      - name: EFFECTIVE_GAS_PRICE
        tests:
          - not_null
      - name: R
        tests:
          - not_null
      - name: S
        tests:
          - not_null
      - name: V
        tests:
          - not_null
      - name: FACT_TRANSACTIONS_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
41
models/testnet/core/silver/silver_testnet__blocks.sql
Normal file
@ -0,0 +1,41 @@
-- depends_on: {{ ref('bronze_testnet__blocks') }}
{{ config (
    materialized = "incremental",
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['modified_timestamp::DATE','partition_key'],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['silver_testnet']
) }}

WITH bronze_blocks AS (
    SELECT
        block_number,
        partition_key,
        DATA AS block_json,
        _inserted_timestamp
    FROM
{% if is_incremental() %}
    {{ ref('bronze_testnet__blocks') }}
    WHERE _inserted_timestamp >= (
        SELECT
            COALESCE(MAX(_inserted_timestamp), '1900-01-01'::TIMESTAMP) AS _inserted_timestamp
        FROM {{ this }}
    ) AND DATA IS NOT NULL
{% else %}
    {{ ref('bronze_testnet__blocks_fr') }}
    WHERE DATA IS NOT NULL
{% endif %}
)

SELECT
    block_number,
    partition_key,
    block_json,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS blocks_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM bronze_blocks
QUALIFY ROW_NUMBER() OVER (PARTITION BY blocks_id ORDER BY _inserted_timestamp DESC) = 1
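For reference, on an incremental run the Jinja branch above compiles to a plain filtered select against the bronze view; the fully qualified names below are invented for illustration. Because incremental_strategy is delete+insert with unique_key block_number, any block that reappears in a batch is deleted and re-inserted wholesale, so a re-pulled block cannot leave stale rows behind.

-- Hedged sketch of the compiled incremental branch; database and schema names are invented.
SELECT
    block_number,
    partition_key,
    DATA AS block_json,
    _inserted_timestamp
FROM
    analytics_db.bronze.bronze_testnet__blocks
WHERE _inserted_timestamp >= (
    SELECT
        COALESCE(MAX(_inserted_timestamp), '1900-01-01'::TIMESTAMP)
    FROM analytics_db.silver.silver_testnet__blocks
) AND DATA IS NOT NULL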
112
models/testnet/core/silver/silver_testnet__contracts.sql
Normal file
@ -0,0 +1,112 @@
{{ config(
    materialized = 'incremental',
    unique_key = 'contract_address',
    merge_exclude_columns = ["inserted_timestamp"],
    tags = ['silver_testnet','contracts']
) }}

WITH base_metadata AS (
    SELECT
        contract_address,
        block_number,
        function_sig AS function_signature,
        read_result AS read_output,
        _inserted_timestamp
    FROM
        {{ ref('bronze_api_testnet__token_reads') }}
    WHERE
        read_result IS NOT NULL
        AND read_result <> '0x'

{% if is_incremental() %}
        AND _inserted_timestamp >= (
            SELECT
                COALESCE(
                    MAX(_inserted_timestamp),
                    '1970-01-01'
                )
            FROM
                {{ this }}
        )
{% endif %}
),
token_names AS (
    SELECT
        contract_address,
        block_number,
        function_signature,
        read_output,
        utils.udf_hex_to_string(
            SUBSTR(read_output, (64 * 2 + 3), LEN(read_output))
        ) AS token_name
    FROM
        base_metadata
    WHERE
        function_signature = '0x06fdde03'
        AND token_name IS NOT NULL
),
token_symbols AS (
    SELECT
        contract_address,
        block_number,
        function_signature,
        read_output,
        utils.udf_hex_to_string(
            SUBSTR(read_output, (64 * 2 + 3), LEN(read_output))
        ) AS token_symbol
    FROM
        base_metadata
    WHERE
        function_signature = '0x95d89b41'
        AND token_symbol IS NOT NULL
),
token_decimals AS (
    SELECT
        contract_address,
        CASE
            WHEN read_output IS NOT NULL THEN utils.udf_hex_to_int(
                read_output :: STRING
            )
            ELSE NULL
        END AS token_decimals,
        LENGTH(token_decimals) AS dec_length
    FROM
        base_metadata
    WHERE
        function_signature = '0x313ce567'
        AND read_output IS NOT NULL
        AND read_output <> '0x'
),
contracts AS (
    SELECT
        contract_address,
        MAX(_inserted_timestamp) AS _inserted_timestamp
    FROM
        base_metadata
    GROUP BY
        1
)
SELECT
    c1.contract_address :: STRING AS contract_address,
    token_name,
    TRY_TO_NUMBER(token_decimals) AS token_decimals,
    token_symbol,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(
        ['c1.contract_address']
    ) }} AS contracts_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM
    contracts c1
    LEFT JOIN token_names
        ON c1.contract_address = token_names.contract_address
    LEFT JOIN token_symbols
        ON c1.contract_address = token_symbols.contract_address
    LEFT JOIN token_decimals
        ON c1.contract_address = token_decimals.contract_address
        AND dec_length < 3
QUALIFY (ROW_NUMBER() OVER (PARTITION BY c1.contract_address ORDER BY _inserted_timestamp DESC)) = 1
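The three function_signature literals above are the standard ERC-20 selectors for name() (0x06fdde03), symbol() (0x95d89b41) and decimals() (0x313ce567). For the two string reads, SUBSTR(read_output, 64 * 2 + 3) skips the '0x' prefix plus the two 32-byte ABI head words (offset and length), leaving only the character bytes. A worked example follows; the hex literal is fabricated for illustration, and utils.udf_hex_to_string is assumed to ignore the trailing zero padding.

-- Hedged worked example: an ABI-encoded string "DAI".
-- Head word 1 = offset 0x20, head word 2 = length 3, tail = '444149' plus zero padding.
SELECT
    utils.udf_hex_to_string(
        SUBSTR(
            '0x0000000000000000000000000000000000000000000000000000000000000020'
            || '0000000000000000000000000000000000000000000000000000000000000003'
            || '4441490000000000000000000000000000000000000000000000000000000000',
            (64 * 2 + 3)
        )
    ) AS token_symbol  -- expected: 'DAI'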
@ -0,0 +1,44 @@
{{ config (
    materialized = "incremental",
    unique_key = "created_contract_address",
    merge_exclude_columns = ["inserted_timestamp"],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(block_timestamp, tx_hash, created_contract_address, creator_address), SUBSTRING(created_contract_address, creator_address)",
    tags = ['silver_testnet','contracts']
) }}

SELECT
    block_number,
    block_timestamp,
    tx_hash,
    to_address AS created_contract_address,
    from_address AS creator_address,
    input AS created_contract_input,
    inserted_timestamp AS _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(
        ['to_address']
    ) }} AS created_contracts_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM
    {{ ref('testnet__fact_traces') }}
WHERE
    TYPE ILIKE 'create%'
    AND to_address IS NOT NULL
    AND input IS NOT NULL
    AND input != '0x'
    AND trace_succeeded
    AND tx_succeeded

{% if is_incremental() %}
    AND inserted_timestamp >= (
        SELECT
            MAX(inserted_timestamp) - INTERVAL '4 hours'
        FROM
            {{ this }}
    )
{% endif %}

QUALIFY (ROW_NUMBER() OVER (PARTITION BY created_contract_address ORDER BY _inserted_timestamp DESC)) = 1
44
models/testnet/core/silver/silver_testnet__receipts.sql
Normal file
@ -0,0 +1,44 @@
-- depends_on: {{ ref('bronze_testnet__receipts') }}
{{ config (
    materialized = "incremental",
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['modified_timestamp::DATE','partition_key'],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['silver_testnet']
) }}

WITH bronze_receipts AS (
    SELECT
        block_number,
        partition_key,
        array_index,
        DATA AS receipts_json,
        _inserted_timestamp
    FROM
{% if is_incremental() %}
    {{ ref('bronze_testnet__receipts') }}
    WHERE _inserted_timestamp >= (
        SELECT
            COALESCE(MAX(_inserted_timestamp), '1900-01-01'::TIMESTAMP) AS _inserted_timestamp
        FROM {{ this }}
    ) AND DATA IS NOT NULL
{% else %}
    {{ ref('bronze_testnet__receipts_fr') }}
    WHERE DATA IS NOT NULL
{% endif %}
)

SELECT
    block_number,
    partition_key,
    array_index,
    receipts_json,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(['block_number','array_index']) }} AS receipts_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM bronze_receipts
WHERE array_index IS NOT NULL
QUALIFY ROW_NUMBER() OVER (PARTITION BY receipts_id ORDER BY block_number DESC, _inserted_timestamp DESC) = 1
@ -0,0 +1,134 @@
{{ config(
    materialized = 'incremental',
    unique_key = "contract_address",
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(contract_address)",
    tags = ['silver_testnet','contracts']
) }}

WITH emitted_events AS (
    SELECT
        contract_address,
        COUNT(*) AS event_count,
        MAX(inserted_timestamp) AS max_inserted_timestamp_logs,
        MAX(block_number) AS latest_event_block
    FROM
        {{ ref('testnet__fact_event_logs') }}

{% if is_incremental() %}
    WHERE
        inserted_timestamp > (
            SELECT
                MAX(max_inserted_timestamp_logs)
            FROM
                {{ this }}
        )
{% endif %}
    GROUP BY
        contract_address
),
function_calls AS (
    SELECT
        to_address AS contract_address,
        COUNT(*) AS function_call_count,
        MAX(inserted_timestamp) AS max_inserted_timestamp_traces,
        MAX(block_number) AS latest_call_block
    FROM
        {{ ref('testnet__fact_traces') }}
    WHERE
        tx_succeeded
        AND trace_succeeded
        AND to_address IS NOT NULL
        AND input IS NOT NULL
        AND input <> '0x'

{% if is_incremental() %}
        AND inserted_timestamp > (
            SELECT
                MAX(max_inserted_timestamp_traces)
            FROM
                {{ this }}
        )
{% endif %}
    GROUP BY
        1
),
active_contracts AS (
    SELECT
        contract_address
    FROM
        emitted_events
    UNION
    SELECT
        contract_address
    FROM
        function_calls
),
previous_totals AS (

{% if is_incremental() %}
    SELECT
        contract_address,
        total_event_count,
        total_call_count,
        max_inserted_timestamp_logs,
        latest_event_block,
        max_inserted_timestamp_traces,
        latest_call_block
    FROM
        {{ this }}
{% else %}
    SELECT
        NULL AS contract_address,
        0 AS total_event_count,
        0 AS total_call_count,
        '1970-01-01 00:00:00' AS max_inserted_timestamp_logs,
        0 AS latest_event_block,
        '1970-01-01 00:00:00' AS max_inserted_timestamp_traces,
        0 AS latest_call_block
{% endif %}
)
SELECT
    C.contract_address,
    COALESCE(p.total_event_count, 0) + COALESCE(e.event_count, 0) AS total_event_count,
    COALESCE(p.total_call_count, 0) + COALESCE(f.function_call_count, 0) AS total_call_count,
    COALESCE(p.total_event_count, 0) + COALESCE(e.event_count, 0)
        + COALESCE(p.total_call_count, 0) + COALESCE(f.function_call_count, 0) AS total_interaction_count,
    COALESCE(e.max_inserted_timestamp_logs, p.max_inserted_timestamp_logs, '1970-01-01 00:00:00') AS max_inserted_timestamp_logs,
    COALESCE(f.max_inserted_timestamp_traces, p.max_inserted_timestamp_traces, '1970-01-01 00:00:00') AS max_inserted_timestamp_traces,
    COALESCE(e.latest_event_block, p.latest_event_block, 0) AS latest_event_block,
    COALESCE(f.latest_call_block, p.latest_call_block, 0) AS latest_call_block
FROM
    active_contracts C
    LEFT JOIN emitted_events e
        ON C.contract_address = e.contract_address
    LEFT JOIN function_calls f
        ON C.contract_address = f.contract_address
    LEFT JOIN previous_totals p
        ON C.contract_address = p.contract_address
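The final select folds each incremental batch into lifetime counters: previous totals come from {{ this }}, the batch's fresh counts are added on top, and COALESCE supplies zero for contracts that are new or inactive this run. With invented numbers, a contract carrying 10 lifetime events and 0 calls that gains 5 events and 2 calls in one batch resolves as follows.

-- Hedged arithmetic illustration; all numbers are invented.
SELECT
    COALESCE(10, 0) + COALESCE(5, 0) AS total_event_count,       -- 15
    COALESCE(0, 0) + COALESCE(2, 0) AS total_call_count,         --  2
    (10 + 5) + (0 + 2) AS total_interaction_count                -- 17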
136
models/testnet/core/silver/silver_testnet__traces.sql
Normal file
@ -0,0 +1,136 @@
-- depends_on: {{ ref('bronze_testnet__traces') }}

{{ config (
    materialized = "incremental",
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['modified_timestamp::DATE','partition_key'],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['silver_testnet']
) }}

WITH bronze_traces AS (
    SELECT
        block_number,
        partition_key,
        VALUE :array_index :: INT AS tx_position,
        DATA :result AS full_traces,
        _inserted_timestamp
    FROM

{% if is_incremental() %}
    {{ ref('bronze_testnet__traces') }}
    WHERE
        _inserted_timestamp >= (
            SELECT
                COALESCE(MAX(_inserted_timestamp), '1900-01-01') AS _inserted_timestamp
            FROM
                {{ this }}
        ) AND DATA :result IS NOT NULL
{% else %}
    {{ ref('bronze_testnet__traces_fr') }}
    WHERE DATA :result IS NOT NULL
{% endif %}

    QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number, tx_position ORDER BY _inserted_timestamp DESC)) = 1
),
flatten_traces AS (
    SELECT
        block_number,
        tx_position,
        partition_key,
        IFF(
            path IN (
                'result',
                'result.value',
                'result.type',
                'result.to',
                'result.input',
                'result.gasUsed',
                'result.gas',
                'result.from',
                'result.output',
                'result.error',
                'result.revertReason',
                'result.time',
                'gasUsed',
                'gas',
                'type',
                'to',
                'from',
                'value',
                'input',
                'error',
                'output',
                'time',
                'revertReason'
            ),
            'ORIGIN',
            REGEXP_REPLACE(REGEXP_REPLACE(path, '[^0-9]+', '_'), '^_|_$', '')
        ) AS trace_address,
        _inserted_timestamp,
        OBJECT_AGG(
            key,
            VALUE
        ) AS trace_json,
        CASE
            WHEN trace_address = 'ORIGIN' THEN NULL
            WHEN POSITION(
                '_' IN trace_address
            ) = 0 THEN 'ORIGIN'
            ELSE REGEXP_REPLACE(
                trace_address,
                '_[0-9]+$',
                '',
                1,
                1
            )
        END AS parent_trace_address,
        SPLIT(
            trace_address,
            '_'
        ) AS trace_address_array
    FROM
        bronze_traces txs,
        TABLE(
            FLATTEN(
                input => PARSE_JSON(
                    txs.full_traces
                ),
                recursive => TRUE
            )
        ) f
    WHERE
        f.index IS NULL
        AND f.key != 'calls'
        AND f.path != 'result'
    GROUP BY
        block_number,
        tx_position,
        partition_key,
        trace_address,
        _inserted_timestamp
)
SELECT
    block_number,
    tx_position,
    trace_address,
    parent_trace_address,
    trace_address_array,
    trace_json,
    partition_key,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(
        ['block_number'] +
        ['tx_position'] +
        ['trace_address']
    ) }} AS traces_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM
    flatten_traces
QUALIFY (ROW_NUMBER() OVER (PARTITION BY traces_id ORDER BY _inserted_timestamp DESC)) = 1
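The two nested REGEXP_REPLACE calls above turn a recursive FLATTEN path into a call-tree coordinate: every non-numeric run collapses to an underscore and the edges are trimmed, so only the 'calls' array indices survive, and dropping the last index yields the parent. A worked example on literal inputs; the path string is illustrative of what FLATTEN emits for a nested callTracer frame:

-- Hedged illustration on literal inputs.
SELECT
    REGEXP_REPLACE(REGEXP_REPLACE('calls[0].calls[2].value', '[^0-9]+', '_'), '^_|_$', '') AS trace_address,        -- '0_2'
    REGEXP_REPLACE('0_2', '_[0-9]+$', '', 1, 1) AS parent_trace_address                                             -- '0'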
44
models/testnet/core/silver/silver_testnet__transactions.sql
Normal file
@ -0,0 +1,44 @@
-- depends_on: {{ ref('bronze_testnet__transactions') }}
{{ config (
    materialized = "incremental",
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['modified_timestamp::DATE','partition_key'],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['silver_testnet']
) }}

WITH bronze_transactions AS (
    SELECT
        block_number,
        partition_key,
        VALUE :array_index :: INT AS tx_position,
        DATA AS transaction_json,
        _inserted_timestamp
    FROM
{% if is_incremental() %}
    {{ ref('bronze_testnet__transactions') }}
    WHERE _inserted_timestamp >= (
        SELECT
            COALESCE(MAX(_inserted_timestamp), '1900-01-01'::TIMESTAMP) AS _inserted_timestamp
        FROM {{ this }}
    ) AND DATA IS NOT NULL
{% else %}
    {{ ref('bronze_testnet__transactions_fr') }}
    WHERE DATA IS NOT NULL
{% endif %}
)

SELECT
    block_number,
    partition_key,
    tx_position,
    transaction_json,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(['block_number','tx_position']) }} AS transactions_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM bronze_transactions
WHERE tx_position IS NOT NULL
QUALIFY ROW_NUMBER() OVER (PARTITION BY transactions_id ORDER BY _inserted_timestamp DESC) = 1
@ -0,0 +1,9 @@
{{ config (
    materialized = "view",
    tags = ['full_test']
) }}

SELECT
    *
FROM
    {{ ref('silver_testnet__blocks') }}
@ -0,0 +1,51 @@
version: 2
models:
  - name: test_silver_testnet__blocks_full
    description: "This is a view used to test all of the silver blocks model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - BLOCK_NUMBER
      - fsc_utils.sequence_gaps:
          column_name: BLOCK_NUMBER
          config:
            severity: error
            error_if: ">10"

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_JSON
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
@ -0,0 +1,16 @@
{{ config (
    materialized = "view",
    tags = ['recent_test']
) }}

SELECT
    *
FROM
    {{ ref('silver_testnet__blocks') }}
WHERE
    block_number > (
        SELECT
            block_number
        FROM
            {{ ref('_testnet_block_lookback') }}
    )
@ -0,0 +1,51 @@
version: 2
models:
  - name: test_silver_testnet__blocks_recent
    description: "This is a view used to test the last three days of blocks."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - BLOCK_NUMBER
      - fsc_utils.sequence_gaps:
          column_name: BLOCK_NUMBER
          config:
            severity: error
            error_if: ">10"

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_JSON
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
@ -0,0 +1,9 @@
{{ config (
    materialized = "view",
    tags = ['full_test']
) }}

SELECT
    *
FROM
    {{ ref('silver_testnet__receipts') }}
@ -0,0 +1,49 @@
version: 2
models:
  - name: test_silver_testnet__receipts_full
    description: "This is a view used to test all of the silver receipts model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - RECEIPTS_ID

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: ARRAY_INDEX
        tests:
          - not_null
      - name: RECEIPTS_JSON
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
@ -0,0 +1,16 @@
{{ config (
    materialized = "view",
    tags = ['recent_test']
) }}

SELECT
    *
FROM
    {{ ref('silver_testnet__receipts') }}
WHERE
    block_number > (
        SELECT
            block_number
        FROM
            {{ ref('_testnet_block_lookback') }}
    )
@ -0,0 +1,49 @@
version: 2
models:
  - name: test_silver_testnet__receipts_recent
    description: "This is a view used to test the last three days of receipts."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - RECEIPTS_ID

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: ARRAY_INDEX
        tests:
          - not_null
      - name: RECEIPTS_JSON
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
@ -0,0 +1,9 @@
{{ config (
    materialized = "view",
    tags = ['full_test']
) }}

SELECT
    *
FROM
    {{ ref('silver_testnet__traces') }}
@ -0,0 +1,52 @@
version: 2
models:
  - name: test_silver_testnet__traces_full
    description: "This is a view used to test all of the silver traces model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TRACES_ID

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: TX_POSITION
        tests:
          - not_null
      - name: TRACE_ADDRESS
        tests:
          - not_null
      - name: TRACE_JSON
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
@ -0,0 +1,16 @@
{{ config (
    materialized = "view",
    tags = ['recent_test']
) }}

SELECT
    *
FROM
    {{ ref('silver_testnet__traces') }}
WHERE
    block_number > (
        SELECT
            block_number
        FROM
            {{ ref('_testnet_block_lookback') }}
    )
@ -0,0 +1,52 @@
version: 2
models:
  - name: test_silver_testnet__traces_recent
    description: "This is a view used to test the last three days of traces."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TRACES_ID

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: TX_POSITION
        tests:
          - not_null
      - name: TRACE_ADDRESS
        tests:
          - not_null
      - name: TRACE_JSON
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
@ -0,0 +1,9 @@
{{ config (
    materialized = "view",
    tags = ['full_test']
) }}

SELECT
    *
FROM
    {{ ref('silver_testnet__transactions') }}
@ -0,0 +1,53 @@
version: 2
models:
  - name: test_silver_testnet__transactions_full
    description: "This is a view used to test all of the silver transactions model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TRANSACTIONS_ID
      - fsc_utils.sequence_gaps:
          partition_by:
            - BLOCK_NUMBER
          column_name: TX_POSITION

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: TX_POSITION
        tests:
          - not_null
      - name: TRANSACTION_JSON
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
@ -0,0 +1,16 @@
{{ config (
    materialized = "view",
    tags = ['recent_test']
) }}

SELECT
    *
FROM
    {{ ref('silver_testnet__transactions') }}
WHERE
    block_number > (
        SELECT
            block_number
        FROM
            {{ ref('_testnet_block_lookback') }}
    )
@ -0,0 +1,53 @@
version: 2
models:
  - name: test_silver_testnet__transactions_recent
    description: "This is a view used to test the last three days of transactions."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TRANSACTIONS_ID
      - fsc_utils.sequence_gaps:
          partition_by:
            - BLOCK_NUMBER
          column_name: TX_POSITION

    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: TX_POSITION
        tests:
          - not_null
      - name: TRANSACTION_JSON
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
@ -0,0 +1,33 @@
-- depends_on: {{ ref('bronze_testnet__blocks') }}

{{ config (
    materialized = "incremental",
    unique_key = "block_number",
    cluster_by = "ROUND(block_number, -3)",
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['streamline_testnet_complete']
) }}

SELECT
    block_number,
    file_name,
    {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_blocks_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    _inserted_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM

{% if is_incremental() %}
    {{ ref('bronze_testnet__blocks') }}
    WHERE
        _inserted_timestamp >= (
            SELECT
                COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
            FROM
                {{ this }}
        )
{% else %}
    {{ ref('bronze_testnet__blocks_fr') }}
{% endif %}

QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1
@ -0,0 +1,33 @@
-- depends_on: {{ ref('bronze_testnet__receipts') }}

{{ config (
    materialized = "incremental",
    unique_key = "block_number",
    cluster_by = "ROUND(block_number, -3)",
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['streamline_testnet_complete']
) }}

SELECT
    block_number,
    file_name,
    {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_receipts_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    _inserted_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM

{% if is_incremental() %}
    {{ ref('bronze_testnet__receipts') }}
    WHERE
        _inserted_timestamp >= (
            SELECT
                COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
            FROM
                {{ this }}
        )
{% else %}
    {{ ref('bronze_testnet__receipts_fr') }}
{% endif %}

QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1
@ -0,0 +1,33 @@
-- depends_on: {{ ref('bronze_testnet__traces') }}

{{ config (
    materialized = "incremental",
    unique_key = "block_number",
    cluster_by = "ROUND(block_number, -3)",
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['streamline_testnet_complete']
) }}

SELECT
    block_number,
    file_name,
    {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_traces_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    _inserted_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM

{% if is_incremental() %}
    {{ ref('bronze_testnet__traces') }}
    WHERE
        _inserted_timestamp >= (
            SELECT
                COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
            FROM
                {{ this }}
        )
{% else %}
    {{ ref('bronze_testnet__traces_fr') }}
{% endif %}

QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1
@ -0,0 +1,33 @@
-- depends_on: {{ ref('bronze_testnet__transactions') }}

{{ config (
    materialized = "incremental",
    unique_key = "block_number",
    cluster_by = "ROUND(block_number, -3)",
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['streamline_testnet_complete']
) }}

SELECT
    block_number,
    file_name,
    {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_transactions_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    _inserted_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM

{% if is_incremental() %}
    {{ ref('bronze_testnet__transactions') }}
    WHERE
        _inserted_timestamp >= (
            SELECT
                COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
            FROM
                {{ this }}
        )
{% else %}
    {{ ref('bronze_testnet__transactions_fr') }}
{% endif %}

QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1
@ -0,0 +1,56 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}

{{ config (
    materialized = "view",
    post_hook = fsc_utils.if_data_call_function_v2(
        func = 'streamline.udf_bulk_rest_api_v2',
        target = "{{this.schema}}.{{this.identifier}}",
        params = { "external_table": "testnet_blocks_transactions",
                   "sql_limit": "2000000",
                   "producer_batch_size": "7200",
                   "worker_batch_size": "1800",
                   "sql_source": "{{this.identifier}}",
                   "async_concurrent_requests": "1",
                   "exploded_key": tojson(["result", "result.transactions"]) }
    ),
    tags = ['streamline_testnet_history']
) }}

WITH to_do AS (
    SELECT block_number
    FROM {{ ref("streamline__testnet_blocks") }}
    EXCEPT
    SELECT block_number
    FROM {{ ref("streamline__testnet_blocks_complete") }} b
    INNER JOIN {{ ref("streamline__testnet_transactions_complete") }} t USING (block_number)
),
ready_blocks AS (
    SELECT block_number
    FROM to_do
    WHERE block_number < (SELECT block_number FROM {{ ref("_testnet_block_lookback") }})
)
SELECT
    block_number,
    ROUND(block_number, -3) AS partition_key,
    live.udf_api(
        'POST',
        '{Service}/{Authentication}',
        OBJECT_CONSTRUCT(
            'Content-Type', 'application/json',
            'fsc-quantum-state', 'streamline'
        ),
        OBJECT_CONSTRUCT(
            'id', block_number,
            'jsonrpc', '2.0',
            'method', 'eth_getBlockByNumber',
            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)
        ),
        '{{ node_secret_path }}'
    ) AS request
FROM
    ready_blocks
ORDER BY block_number DESC
LIMIT
    2000000
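Each row of this view becomes one JSON-RPC POST issued by the streamline worker. As an illustration of the request body built above for a single block, take block 12345; the literal below assumes utils.udf_int_to_hex(12345) returns '0x3039', which is the standard hex encoding but is stated here as an assumption about that project UDF.

-- Hedged illustration: the request body for block 12345.
SELECT OBJECT_CONSTRUCT(
    'id', 12345,
    'jsonrpc', '2.0',
    'method', 'eth_getBlockByNumber',
    'params', ARRAY_CONSTRUCT('0x3039', TRUE)
) AS request_body
-- yields {"id": 12345, "jsonrpc": "2.0", "method": "eth_getBlockByNumber", "params": ["0x3039", true]}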
@ -0,0 +1,55 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}

{{ config (
    materialized = "view",
    post_hook = fsc_utils.if_data_call_function_v2(
        func = 'streamline.udf_bulk_rest_api_v2',
        target = "{{this.schema}}.{{this.identifier}}",
        params = { "external_table": "testnet_receipts",
                   "sql_limit": "2000000",
                   "producer_batch_size": "7200",
                   "worker_batch_size": "1800",
                   "sql_source": "{{this.identifier}}",
                   "async_concurrent_requests": "1",
                   "exploded_key": tojson(["result"]) }
    ),
    tags = ['streamline_testnet_history']
) }}

WITH to_do AS (
    SELECT block_number
    FROM {{ ref("streamline__testnet_blocks") }}
    EXCEPT
    SELECT block_number
    FROM {{ ref("streamline__testnet_receipts_complete") }}
),
ready_blocks AS (
    SELECT block_number
    FROM to_do
    WHERE block_number < (SELECT block_number FROM {{ ref("_testnet_block_lookback") }})
)
SELECT
    block_number,
    ROUND(block_number, -3) AS partition_key,
    live.udf_api(
        'POST',
        '{Service}/{Authentication}',
        OBJECT_CONSTRUCT(
            'Content-Type', 'application/json',
            'fsc-quantum-state', 'streamline'
        ),
        OBJECT_CONSTRUCT(
            'id', block_number,
            'jsonrpc', '2.0',
            'method', 'eth_getBlockReceipts',
            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))
        ),
        '{{ node_secret_path }}'
    ) AS request
FROM
    ready_blocks
ORDER BY block_number DESC
LIMIT
    2000000
@ -0,0 +1,55 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}

{{ config (
    materialized = "view",
    post_hook = fsc_utils.if_data_call_function_v2(
        func = 'streamline.udf_bulk_rest_api_v2',
        target = "{{this.schema}}.{{this.identifier}}",
        params = { "external_table": "testnet_traces",
                   "sql_limit": "2000000",
                   "producer_batch_size": "7200",
                   "worker_batch_size": "1800",
                   "sql_source": "{{this.identifier}}",
                   "async_concurrent_requests": "1",
                   "exploded_key": tojson(["result"]) }
    ),
    tags = ['streamline_testnet_history']
) }}

WITH to_do AS (
    SELECT block_number
    FROM {{ ref("streamline__testnet_blocks") }}
    EXCEPT
    SELECT block_number
    FROM {{ ref("streamline__testnet_traces_complete") }}
),
ready_blocks AS (
    SELECT block_number
    FROM to_do
    WHERE block_number < (SELECT block_number FROM {{ ref("_testnet_block_lookback") }})
)
SELECT
    block_number,
    ROUND(block_number, -3) AS partition_key,
    live.udf_api(
        'POST',
        '{Service}/{Authentication}',
        OBJECT_CONSTRUCT(
            'Content-Type', 'application/json',
            'fsc-quantum-state', 'streamline'
        ),
        OBJECT_CONSTRUCT(
            'id', block_number,
            'jsonrpc', '2.0',
            'method', 'debug_traceBlockByNumber',
            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))
        ),
        '{{ node_secret_path }}'
    ) AS request
FROM
    ready_blocks
ORDER BY block_number DESC
LIMIT
    2000000
@ -0,0 +1,54 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}

{{ config (
    materialized = "view",
    post_hook = fsc_utils.if_data_call_function_v2(
        func = 'streamline.udf_bulk_rest_api_v2',
        target = "{{this.schema}}.{{this.identifier}}",
        params = { "external_table": "testnet_blocks_transactions",
                   "sql_limit": "7200",
                   "producer_batch_size": "3600",
                   "worker_batch_size": "1800",
                   "sql_source": "{{this.identifier}}",
                   "exploded_key": tojson(["result", "result.transactions"]) }
    ),
    tags = ['streamline_testnet_realtime']
) }}

WITH to_do AS (
    SELECT block_number
    FROM {{ ref("streamline__testnet_blocks") }}
    EXCEPT
    SELECT block_number
    FROM {{ ref("streamline__testnet_blocks_complete") }} b
    INNER JOIN {{ ref("streamline__testnet_transactions_complete") }} t USING (block_number)
),
ready_blocks AS (
    SELECT block_number
    FROM to_do
)
SELECT
    block_number,
    ROUND(block_number, -3) AS partition_key,
    live.udf_api(
        'POST',
        '{Service}/{Authentication}',
        OBJECT_CONSTRUCT(
            'Content-Type', 'application/json',
            'fsc-quantum-state', 'streamline'
        ),
        OBJECT_CONSTRUCT(
            'id', block_number,
            'jsonrpc', '2.0',
            'method', 'eth_getBlockByNumber',
            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)
        ),
        '{{ node_secret_path }}'
    ) AS request
FROM
    ready_blocks
ORDER BY block_number DESC
LIMIT
    7200
@ -0,0 +1,53 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}

{{ config (
    materialized = "view",
    post_hook = fsc_utils.if_data_call_function_v2(
        func = 'streamline.udf_bulk_rest_api_v2',
        target = "{{this.schema}}.{{this.identifier}}",
        params = { "external_table": "testnet_receipts",
                   "sql_limit": "7200",
                   "producer_batch_size": "1800",
                   "worker_batch_size": "1800",
                   "sql_source": "{{this.identifier}}",
                   "exploded_key": tojson(["result"]) }
    ),
    tags = ['streamline_testnet_realtime']
) }}

WITH to_do AS (
    SELECT block_number
    FROM {{ ref("streamline__testnet_blocks") }}
    EXCEPT
    SELECT block_number
    FROM {{ ref("streamline__testnet_receipts_complete") }}
),
ready_blocks AS (
    SELECT block_number
    FROM to_do
)
SELECT
    block_number,
    ROUND(block_number, -3) AS partition_key,
    live.udf_api(
        'POST',
        '{Service}/{Authentication}',
        OBJECT_CONSTRUCT(
            'Content-Type', 'application/json',
            'fsc-quantum-state', 'streamline'
        ),
        OBJECT_CONSTRUCT(
            'id', block_number,
            'jsonrpc', '2.0',
            'method', 'eth_getBlockReceipts',
            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))
        ),
        '{{ node_secret_path }}'
    ) AS request
FROM
    ready_blocks
ORDER BY block_number DESC
LIMIT
    7200
@ -0,0 +1,53 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}

{{ config (
    materialized = "view",
    post_hook = fsc_utils.if_data_call_function_v2(
        func = 'streamline.udf_bulk_rest_api_v2',
        target = "{{this.schema}}.{{this.identifier}}",
        params = { "external_table": "testnet_traces",
                   "sql_limit": "7200",
                   "producer_batch_size": "1800",
                   "worker_batch_size": "1800",
                   "sql_source": "{{this.identifier}}",
                   "exploded_key": tojson(["result"]) }
    ),
    tags = ['streamline_testnet_realtime']
) }}

WITH to_do AS (
    SELECT block_number
    FROM {{ ref("streamline__testnet_blocks") }}
    EXCEPT
    SELECT block_number
    FROM {{ ref("streamline__testnet_traces_complete") }}
),
ready_blocks AS (
    SELECT block_number
    FROM to_do
)
SELECT
    block_number,
    ROUND(block_number, -3) AS partition_key,
    live.udf_api(
        'POST',
        '{Service}/{Authentication}',
        OBJECT_CONSTRUCT(
            'Content-Type', 'application/json',
            'fsc-quantum-state', 'streamline'
        ),
        OBJECT_CONSTRUCT(
            'id', block_number,
            'jsonrpc', '2.0',
            'method', 'debug_traceBlockByNumber',
            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))
        ),
        '{{ node_secret_path }}'
    ) AS request
FROM
    ready_blocks
ORDER BY block_number DESC
LIMIT
    7200
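Each request model above ships in two variants sharing the same body. The history models keep a backfill guard plus large batch knobs (sql_limit 2000000, producer_batch_size 7200, async_concurrent_requests pinned to 1), while the realtime models drop the guard and cap each poll at 7200 blocks. The guard is the only structural difference between them:

-- History variant only: stay behind the lookback height so the backfill
-- never races the realtime poller for the newest blocks.
ready_blocks AS (
    SELECT block_number
    FROM to_do
    WHERE block_number < (SELECT block_number FROM {{ ref("_testnet_block_lookback") }})
)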
Some files were not shown because too many files have changed in this diff.