Mirror of https://github.com/FlipsideCrypto/aurora-models.git (synced 2026-02-06 11:01:45 +00:00)

Commit a37b3d1964: "pending for native"
.github/workflows/dbt_alter_gha_task.yml (new file, 56 lines)
@@ -0,0 +1,56 @@
name: dbt_alter_gha_tasks

on:
  workflow_call:
    inputs:
      workflow_name:
        type: string
        description: "Name of the workflow to perform the action on, no .yml extension"
        required: true
      task_action:
        type: string
        description: "Action to perform"
        required: true
      environment:
        type: string
        description: "Environment to run the workflow in"
        required: true

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: ${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip3 install -r requirements.txt
          dbt deps

      - name: Set up workflow name
        run: echo "WORKFLOW_NAME_UPPER=$(echo '${{ inputs.workflow_name }}' | tr '[:lower:]' '[:upper:]')" >> $GITHUB_ENV

      - name: Run DBT Jobs
        run: |
          dbt run-operation fsc_utils.alter_gha_task --args '{ "task_name": "TRIGGER_${{ env.WORKFLOW_NAME_UPPER }}", "task_action": "${{ inputs.task_action }}" }'
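The workflow above passes a task name of the form `TRIGGER_<WORKFLOW_NAME_UPPER>` plus an action into `fsc_utils.alter_gha_task`. As a rough, hypothetical illustration of the kind of Snowflake statement such a task-control macro would typically issue (the database/schema placement and the RESUME/SUSPEND verbs are assumptions, not taken from this commit):

```sql
-- Hypothetical sketch only: what alter_gha_task is presumed to run for
-- workflow_name = dbt_run_scheduled and task_action = RESUME / SUSPEND.
-- The AURORA.GITHUB_ACTIONS location is an assumption.
ALTER TASK AURORA.GITHUB_ACTIONS.TRIGGER_DBT_RUN_SCHEDULED RESUME;
ALTER TASK AURORA.GITHUB_ACTIONS.TRIGGER_DBT_RUN_SCHEDULED SUSPEND;
```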
.github/workflows/dbt_docs_update.yml (51 lines changed)
@@ -22,6 +22,51 @@ concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_docs_updates.yml@main
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: refresh ddl for datashare
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;

      - name: checkout docs branch
        run: |
          git checkout -B docs origin/main

      - name: generate dbt docs
        run: dbt docs generate -t prod

      - name: move files to docs directory
        run: |
          mkdir -p ./docs
          cp target/{catalog.json,manifest.json,index.html} docs/
      - name: clean up target directory
        run: dbt clean

      - name: check for changes
        run: git status

      - name: stage changed files
        run: git add .

      - name: commit changed files
        run: |
          git config user.email "abc@xyz"
          git config user.name "github-actions"
          git commit -am "Auto-update docs"
      - name: push changes to docs
        run: |
          git push -f --set-upstream origin docs
.github/workflows/dbt_run_adhoc.yml (16 lines changed)
@@ -7,7 +7,7 @@ on:
        - "main"
    inputs:
      environment:
        type: choice
        type: choice
        description: DBT Run Environment
        required: true
        options:
@@ -15,9 +15,9 @@ on:
          - prod
        default: dev
      warehouse:
        type: choice
        type: choice
        description: Snowflake warehouse
        required: true
        required: true
        options:
          - DBT
          - DBT_CLOUD
@@ -25,9 +25,9 @@ on:
        default: DBT
      dbt_command:
        type: string
        description: 'DBT Run Command'
        description: "DBT Run Command"
        required: true

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
@@ -47,15 +47,15 @@ concurrency:
jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v1
      - uses: actions/setup-python@v4
        with:
          python-version: "3.7.x"
          python-version: "3.10"

      - name: install dependencies
        run: |
.github/workflows/dbt_run_atlas.yml (new file, 53 lines)
@@ -0,0 +1,53 @@
name: dbt_run_atlas
run-name: dbt_run_atlas

on:
  workflow_dispatch:
  schedule:
    # Runs once per day at 0 UTC
    - cron: "0 0 * * *"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run DBT Jobs
        run: |
          dbt run -s tag:atlas

      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target
.github/workflows/dbt_run_deployment.yml (39 lines changed)
@@ -37,10 +37,35 @@ concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_deployment_template.yml@main
    with:
      dbt_command: |
        ${{ inputs.dbt_command }}
      warehouse: ${{ inputs.WAREHOUSE }}
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

      - name: Run datashare model
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
          dbt run-operation run_query --args "{sql: call admin.datashare.sp_grant_share_permissions('${{ env.DATABASE }}')}"

      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target
.github/workflows/dbt_run_dev_refresh.yml (35 lines changed)
@@ -25,11 +25,30 @@ concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
    with:
      dbt_command: >
        dbt run-operation run_sp_create_prod_clone
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run DBT Jobs
        run: |
          dbt run-operation run_sp_create_prod_clone

      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target

@@ -32,9 +32,9 @@ jobs:
    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v1
      - uses: actions/setup-python@v4
        with:
          python-version: "3.7.x"
          python-version: "3.10"

      - name: install dependencies
        run: |
.github/workflows/dbt_run_observability.yml (4 lines changed)
@@ -32,9 +32,9 @@ jobs:
    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v1
      - uses: actions/setup-python@v4
        with:
          python-version: "3.7.x"
          python-version: "3.10"

      - name: install dependencies
        run: |
.github/workflows/dbt_run_scheduled.yml (41 lines changed)
@@ -3,9 +3,9 @@ run-name: dbt_run_scheduled

on:
  workflow_dispatch:
  schedule:
    # Runs "every hour at minute 10" (see https://crontab.guru)
    - cron: "10 */1 * * *"
  # schedule:
  #   # Runs "every 10 minutes, 5 mins after the realtime job
  #   - cron: "5,15,25,35,45,55 * * * *"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
@@ -24,11 +24,30 @@ concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
    with:
      dbt_command: >
        dbt run -s tag:core
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run DBT Jobs
        run: |
          dbt run -s tag:core

      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target
.github/workflows/dbt_run_streamline_history.yml (38 lines changed)
@@ -3,9 +3,6 @@ run-name: dbt_run_streamline_history

on:
  workflow_dispatch:
  schedule:
    # Runs "every 4 hours" (see https://crontab.guru)
    - cron: "0 */4 * * *"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
@@ -24,11 +21,30 @@ concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
    with:
      dbt_command: >
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True, "STREAMLINE_RUN_HISTORY":True}' -m 1+models/silver/streamline/realtime/ --full-refresh
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True, "STREAMLINE_RUN_HISTORY":True}' -m 1+models/silver/streamline/realtime/ --full-refresh

      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target
@@ -1,34 +0,0 @@
name: dbt_run_streamline_realtime
run-name: dbt_run_streamline_realtime

on:
  workflow_dispatch:
  schedule:
    # Runs "every 1 hours" (see https://crontab.guru)
    - cron: "0 */1 * * *"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
    with:
      dbt_command: >
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/silver/streamline/realtime/
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
.github/workflows/dbt_run_streamline_realtime_blocks_transactions.yml (new file, 53 lines)
@@ -0,0 +1,53 @@
name: dbt_run_streamline_realtime_blocks_transactions
run-name: dbt_run_streamline_realtime_blocks_transactions

on:
  workflow_dispatch:
  # schedule:
  #   # Runs every 10 minutes
  #   - cron: "0,10,20,30,40,50 * * * *"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/silver/streamline/realtime/streamline__blocks_realtime.sql 1+models/silver/streamline/realtime/streamline__transactions_realtime.sql

      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target
.github/workflows/dbt_run_streamline_realtime_tx_receipts.yml (new file, 53 lines)
@@ -0,0 +1,53 @@
name: dbt_run_streamline_realtime_tx_receipts
run-name: dbt_run_streamline_realtime_tx_receipts

on:
  workflow_dispatch:
  # schedule:
  #   # Runs every 10 minutes
  #   - cron: "0,10,20,30,40,50 * * * *"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/silver/streamline/realtime/streamline__tx_receipts_realtime.sql

      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target
.github/workflows/dbt_run_streamline_traces_history.yml (new file, 53 lines)
@@ -0,0 +1,53 @@
name: dbt_run_streamline_traces_history
run-name: dbt_run_streamline_traces_history

on:
  workflow_dispatch:
  schedule:
    # Run every 2 hours
    - cron: "0 */2 * * *"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/silver/streamline/history/streamline__traces_history.sql

      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target
.github/workflows/dbt_test_daily.yml (4 lines changed)
@@ -32,9 +32,9 @@ jobs:
    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v1
      - uses: actions/setup-python@v4
        with:
          python-version: "3.7.x"
          python-version: "3.10"

      - name: install dependencies
        run: |
.github/workflows/dbt_test_tasks.yml (new file, 40 lines)
@@ -0,0 +1,40 @@
name: dbt_test_tasks

on:
  workflow_call:

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run DBT Jobs
        run: |
          dbt test -m models/github_actions/github_actions__current_task_status.sql
.github/workflows/dbt_test_weekly.yml (4 lines changed)
@@ -32,9 +32,9 @@ jobs:
    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v1
      - uses: actions/setup-python@v4
        with:
          python-version: "3.7.x"
          python-version: "3.10"

      - name: install dependencies
        run: |
data/github_actions__workflows.csv (new file, 5 lines)
@@ -0,0 +1,5 @@
workflow_name,workflow_schedule
dbt_run_streamline_realtime_blocks_transactions,"12,42 * * * *"
dbt_run_streamline_realtime_tx_receipts,"17,47 * * * *"
dbt_run_scheduled,"25,55 * * * *"
dbt_test_tasks,"28,58 * * * *"
@@ -5,7 +5,7 @@ name: "aurora_models"
version: "1.0.0"
config-version: 2

require-dbt-version: ">=1.4.0"
require-dbt-version: ">=1.7.0"

# This setting configures which "profile" dbt uses for this project.
profile: "aurora"
@@ -48,6 +48,7 @@ vars:
  UPDATE_SNOWFLAKE_TAGS: True
  WAIT: 0
  OBSERV_FULL_TEST: False
  START_GHA_TASKS: False

tests:
  +store_failures: true # all tests
@@ -67,5 +68,5 @@ dispatch:
    - dbt

query-comment:
  comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}'
  comment: "{{ dbt_snowflake_query_tags.get_query_comment(node) }}"
  append: true # Snowflake removes prefixed comments.
@@ -7,6 +7,5 @@
    {% endset %}
    {% do run_query(sql) %}
    {{- fsc_utils.create_udfs() -}}

  {% endif %}
{% endmacro %}
models/bronze/core/bronze__streamline_FR_traces.sql (new file, 57 lines)
@@ -0,0 +1,57 @@
{{ config (
    materialized = 'view'
) }}

WITH meta AS (

    SELECT
        registered_on AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS _partition_by_block_id
    FROM
        TABLE(
            information_schema.external_table_files(
                table_name => '{{ source( "bronze_streamline", "traces") }}'
            )
        ) A
)
SELECT
    block_number,
    s.value :metadata :request :params [0] :: STRING AS tx_hash,
    DATA,
    _inserted_timestamp,
    MD5(
        CAST(
            COALESCE(CAST(tx_hash AS text), '' :: STRING) AS text
        )
    ) AS id,
    s._partition_by_block_id,
    s.value AS VALUE
FROM
    {{ source(
        "bronze_streamline",
        "traces"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b._partition_by_block_id = s._partition_by_block_id
WHERE
    b._partition_by_block_id = s._partition_by_block_id
    AND (
        DATA :error :code IS NULL
        OR DATA :error :code NOT IN (
            '-32000',
            '-32001',
            '-32002',
            '-32003',
            '-32004',
            '-32005',
            '-32006',
            '-32007',
            '-32008',
            '-32009',
            '-32010',
            '-32608'
        )
    )
models/bronze/core/bronze__streamline_traces.sql (new file, 57 lines)
@@ -0,0 +1,57 @@
{{ config (
    materialized = 'view'
) }}

WITH meta AS (

    SELECT
        last_modified AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS _partition_by_block_id
    FROM
        TABLE(
            information_schema.external_table_file_registration_history(
                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
                table_name => '{{ source( "bronze_streamline", "traces") }}')
        ) A
)
SELECT
    block_number,
    s.value :metadata :request :params [0] :: STRING AS tx_hash,
    DATA,
    _inserted_timestamp,
    MD5(
        CAST(
            COALESCE(CAST(tx_hash AS text), '' :: STRING) AS text
        )
    ) AS id,
    s._partition_by_block_id,
    s.value AS VALUE
FROM
    {{ source(
        "bronze_streamline",
        "traces"
    ) }}
    s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b._partition_by_block_id = s._partition_by_block_id
WHERE
    b._partition_by_block_id = s._partition_by_block_id
    AND (
        DATA :error :code IS NULL
        OR DATA :error :code NOT IN (
            '-32000',
            '-32001',
            '-32002',
            '-32003',
            '-32004',
            '-32005',
            '-32006',
            '-32007',
            '-32008',
            '-32009',
            '-32010',
            '-32608'
        )
    )
@@ -0,0 +1,27 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    asset_id,
    symbol,
    NAME,
    decimals,
    blockchain,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_native_asset_metadata_id,
    _invocation_id
FROM
    {{ source(
        'silver_crosschain',
        'complete_native_asset_metadata'
    ) }}
WHERE
    blockchain = 'aurora'
    AND symbol = ''
    --pending to be added
models/bronze/prices/bronze__complete_native_prices.sql (new file, 30 lines)
@@ -0,0 +1,30 @@
{{ config (
    materialized = 'view'
) }}

SELECT
    HOUR,
    asset_id,
    symbol,
    NAME,
    decimals,
    price,
    blockchain,
    is_imputed,
    is_deprecated,
    provider,
    source,
    _inserted_timestamp,
    inserted_timestamp,
    modified_timestamp,
    complete_native_prices_id,
    _invocation_id
FROM
    {{ source(
        'silver_crosschain',
        'complete_native_prices'
    ) }}
WHERE
    blockchain = ''
    AND symbol = ''
    --pending to be added
@@ -23,35 +23,14 @@ There is more information on how to use dbt docs in the last section of this doc

**Fact Tables:**

- [fact_blocks](#!/model/model.aurora.core__fact_blocks)
- [fact_transactions](#!/model/model.aurora.core__fact_transactions)
- [fact_logs](#!/model/model.aurora.core__fact_logs)
- [fact_receipts](#!/model/model.aurora.core__fact_receipts)
- [fact_blocks](#!/model/model.aurora_models.core__fact_blocks)
- [fact_transactions](#!/model/model.aurora_models.core__fact_transactions)
- [fact_logs](#!/model/model.aurora_models.core__fact_logs)
- [fact_receipts](#!/model/model.aurora_models.core__fact_receipts)

**Stats EZ Tables:**

## **⚠️ Aurora Data Notes and Known Issues**
*Update: July 27, 2023:*
We have identified 5 primary issues with the data coming from the node, and a path forward to resolution after discussions with the Aurora Dev team:
1. Duplicated Transactions
   - Aurora Node issue, fix in progress by Aurora devs. Resolution timeline: 1-2 weeks
1. Reverted Transactions: Transactions or events that have been previously canceled or reverted are still being received from the chain.
   - Aurora Node issue, fix in progress by Aurora devs. Resolution timeline: 1-2 weeks
1. Inaccurate Data: We've detected some inaccuracies in the data, namely incorrect timestamps (10M first block set to 1970) and transactions appearing in the wrong blocks.
   - This is not a data integrity issue. Aurora contains pre-history and only launched as a public blockchain with **block 37,157,757**. Early blocks contain incomplete data, such as `0x0` as the block timestamp.
1. Incomplete Data: We've noticed certain transaction data missing, particularly with regards to received transactions. Incomplete data includes some blocks get read with wrong txs count and txs info there, we believed that is from the out of sync status of the node which needs a full backfill again when the node is back sync
   - This is likely due to our current node provider using an outdated version of the Aurora RPC package. Resolution: change node provider.
1. Block Confirmation Discrepancies: Transactions were confirmed on different blocks than those indicated in Explorer.
   - This is likely due to our current node provider using an outdated version of the Aurora RPC package. Resolution: change node provider.

Our plan of action is (likely) to move to a dedicated node provided by Aurora which will solve the 4 major issues with a single decision. This timeline is dependent on the patch by Aurora, and our timeline will be updated as we learn more.

*Update: July 20, 2023:*
In onboarding Aurora data, our team has encountered several issues with data returned from the node. These are primarily associated with transactions that are either reverted or cancelled. At present, the node returns these transactions across multiple blocks and in different positions within the block at each time. This is uncommon, as the position should be constant. We may see pending transactions within a block on other EVMs, but on re-request the transaction would be finalized. These seem to be persistent across multiple blocks, even in subsequent requests.

At present, these transactions are included in our data. They will have `null` fields like status, fee, and others that are typically derived from receipts. These transactions do not have receipts, so we can identify them through their lack of receipt data.

Flipside is working closely with Near and Aurora to determine how this data should best be presented.
- [ez_core_metrics_hourly](#!/model/model.aurora_models.ez_core_metrics_hourly)

## **Data Model Overview**

@@ -93,4 +72,43 @@ Note that you can also right-click on models to interactively filter and explore
- [Github](https://github.com/FlipsideCrypto/aurora-models)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)

## Archived Notices

### **⚠️ Aurora Data Notes and Known Issues**

_Final Update_
We have switched to a node hosted by the Aurora team, and they pushed relevant fixes. The below issues are since resolved.

_Update: July 27, 2023:_
We have identified 5 primary issues with the data coming from the node, and a path forward to resolution after discussions with the Aurora Dev team:

1. Duplicated Transactions

   - Aurora Node issue, fix in progress by Aurora devs. Resolution timeline: 1-2 weeks

1. Reverted Transactions: Transactions or events that have been previously canceled or reverted are still being received from the chain.

   - Aurora Node issue, fix in progress by Aurora devs. Resolution timeline: 1-2 weeks

1. Inaccurate Data: We've detected some inaccuracies in the data, namely incorrect timestamps (10M first block set to 1970) and transactions appearing in the wrong blocks.

   - This is not a data integrity issue. Aurora contains pre-history and only launched as a public blockchain with **block 37,157,757**. Early blocks contain incomplete data, such as `0x0` as the block timestamp.

1. Incomplete Data: We've noticed certain transaction data missing, particularly with regards to received transactions. Incomplete data includes some blocks get read with wrong txs count and txs info there, we believed that is from the out of sync status of the node which needs a full backfill again when the node is back sync

   - This is likely due to our current node provider using an outdated version of the Aurora RPC package. Resolution: change node provider.

1. Block Confirmation Discrepancies: Transactions were confirmed on different blocks than those indicated in Explorer.

   - This is likely due to our current node provider using an outdated version of the Aurora RPC package. Resolution: change node provider.

Our plan of action is (likely) to move to a dedicated node provided by Aurora which will solve the 4 major issues with a single decision. This timeline is dependent on the patch by Aurora, and our timeline will be updated as we learn more.

_Update: July 20, 2023:_
In onboarding Aurora data, our team has encountered several issues with data returned from the node. These are primarily associated with transactions that are either reverted or cancelled. At present, the node returns these transactions across multiple blocks and in different positions within the block at each time. This is uncommon, as the position should be constant. We may see pending transactions within a block on other EVMs, but on re-request the transaction would be finalized. These seem to be persistent across multiple blocks, even in subsequent requests.

At present, these transactions are included in our data. They will have `null` fields like status, fee, and others that are typically derived from receipts. These transactions do not have receipts, so we can identify them through their lack of receipt data.

Flipside is working closely with Near and Aurora to determine how this data should best be presented.

{% enddocs %}
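The archived note above says the reverted or cancelled transactions carry no receipt data. A minimal sketch of that check, assuming the gold tables in this commit land under an `aurora.core` schema and join on `tx_hash` (both assumptions, not stated in the diff):

```sql
-- Sketch only: flag transactions with no matching receipt, the marker the
-- July 20 note uses for reverted/cancelled transactions from the node.
SELECT
    t.tx_hash,
    t.block_number
FROM aurora.core.fact_transactions t
LEFT JOIN aurora.core.fact_receipts r
    ON r.tx_hash = t.tx_hash
WHERE r.tx_hash IS NULL;  -- no receipt => likely reverted/cancelled
```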
models/descriptions/active_day.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs active_day %}

Date of activity.

{% enddocs %}

@@ -1,5 +1,7 @@
{% docs blocks_table_doc %}

This table contains block level data for the Aurora Blockchain. This table can be used to analyze trends at a block level, for example gas fees vs. total transactions over time. For more information, please see Aurora scan Resources.
This table contains block level data for the Aurora Blockchain. This table can be used to analyze trends at a block level, for example gas fees vs. total transactions over time. For more information, please see Aurora scan Resources.

Please note, Aurora contains pre-history and only launched as a public blockchain with block 37,157,757. Early blocks contain incomplete data, such as 0x0 as the block timestamp and no transactions.

{% enddocs %}

@@ -1,17 +0,0 @@
{% docs pk %}

The unique identifier for each row in the table.

{% enddocs %}

{% docs inserted_timestamp %}

The utc timestamp at which the row was inserted into the table.

{% enddocs %}

{% docs modified_timestamp %}

The utc timestamp at which the row was last modified.

{% enddocs %}

models/descriptions/id.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs id %}

A unique identifier for the record.

{% enddocs %}

models/descriptions/inserted_timestamp.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs inserted_timestamp %}

The timestamp at which the record was initially created and inserted into this table.

{% enddocs %}

models/descriptions/invocation_id.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs invocation_id %}

A job ID to identify the run that last modified a record.

{% enddocs %}

models/descriptions/maa.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs maa %}

Monthly Active Accounts (wallets), as determined by transaction signers.

{% enddocs %}

models/descriptions/modified_timestamp.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs modified_timestamp %}

The timestamp at which this record was last modified by an internal process.

{% enddocs %}

models/descriptions/pk_id.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs pk_id %}

A uniquely generated identifier assigned by a surrogate key

{% enddocs %}

models/descriptions/precise_amounts.md (new file, 17 lines)
@@ -0,0 +1,17 @@
{% docs precise_amount_unadjusted %}

The precise, unadjusted amount of the transaction. This is returned as a string to avoid precision loss.

{% enddocs %}

{% docs precise_amount_adjusted %}

The precise, adjusted amount of the transaction. This is returned as a string to avoid precision loss.

{% enddocs %}

{% docs tx_fee_precise %}

The precise amount of the transaction fee. This is returned as a string to avoid precision loss.

{% enddocs %}

models/descriptions/stats_core.md (new file, 65 lines)
@@ -0,0 +1,65 @@
{% docs ez_core_metrics_hourly_table_doc %}

A convenience table that aggregates block and transaction related metrics using various aggregate functions such as SUM, COUNT, MIN and MAX from the fact_transactions table, on an hourly basis. Stats for the current hour will be updated as new data arrives.

{% enddocs %}
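As a rough sketch (not the model itself) of the hourly roll-up this doc describes, using the column names documented below; the `from_address`, `fee`, and `tx_succeeded` source columns and the `aurora.core` schema are assumptions:

```sql
-- Hedged sketch of an hourly metrics roll-up over fact_transactions.
SELECT
    DATE_TRUNC('hour', block_timestamp) AS block_timestamp_hour,
    MIN(block_number)                   AS block_id_min,
    MAX(block_number)                   AS block_id_max,
    COUNT(DISTINCT block_number)        AS block_count,
    COUNT(tx_hash)                      AS transaction_count,
    COUNT_IF(tx_succeeded)              AS transaction_count_success,
    COUNT_IF(NOT tx_succeeded)          AS transaction_count_failed,
    COUNT(DISTINCT from_address)        AS unique_from_count,
    SUM(fee)                            AS total_fees_native
FROM aurora.core.fact_transactions
GROUP BY 1;
```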
{% docs block_timestamp_hour %}

The hour of the timestamp of the block.

{% enddocs %}

{% docs block_id_min %}

The minimum block id in the hour.

{% enddocs %}

{% docs block_id_max %}

The maximum block id in the hour.

{% enddocs %}

{% docs block_count %}

The number of blocks in the hour.

{% enddocs %}

{% docs transaction_count %}

The number of transactions in the hour.

{% enddocs %}

{% docs transaction_count_success %}

The number of successful transactions in the hour.

{% enddocs %}

{% docs transaction_count_failed %}

The number of failed transactions in the hour.

{% enddocs %}

{% docs unique_from_count %}

The number of unique tx_from addresses in the hour.

{% enddocs %}

{% docs total_fees_native %}

The sum of all fees in the hour, in the native fee currency.

{% enddocs %}

{% docs total_fees_usd %}

The sum of all fees in the hour, in USD.

{% enddocs %}

models/descriptions/traces_block_no.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_block_no %}

The block number of this transaction.

{% enddocs %}

models/descriptions/traces_blocktime.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_blocktime %}

The block timestamp of this transaction.

{% enddocs %}

models/descriptions/traces_call_data.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_call_data %}

The raw JSON data for this trace.

{% enddocs %}

models/descriptions/traces_from.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_from %}

The sending address of this trace. This is not necessarily the from address of the transaction.

{% enddocs %}

models/descriptions/traces_gas.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_gas %}

The gas supplied for this trace.

{% enddocs %}

models/descriptions/traces_gas_used.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_gas_used %}

The gas used for this trace.

{% enddocs %}

models/descriptions/traces_identifier.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_identifier %}

This field represents the position and type of the trace within the transaction.

{% enddocs %}

models/descriptions/traces_index.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs trace_index %}

The index of the trace within the transaction.

{% enddocs %}

models/descriptions/traces_input.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_input %}

The input data for this trace.

{% enddocs %}

models/descriptions/traces_output.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_output %}

The output data for this trace.

{% enddocs %}

models/descriptions/traces_sub.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_sub %}

The amount of nested sub traces for this trace.

{% enddocs %}

models/descriptions/traces_table_doc.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_table_doc %}

This table contains flattened trace data for internal contract calls on the Ethereum blockchain. Hex encoded fields can be decoded to integers by using `TO_NUMBER(<FIELD>, 'XXXXXXXXXXXX')`, with the number of Xs being the max length of the encoded field. You must also remove the `0x` from your field to use the `TO_NUMBER()` function, if applicable.

{% enddocs %}
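A small illustration of the decoding pattern the doc above describes; `gas_used` is used here only as an example of a hex-encoded field, and the `aurora.core` location is an assumption:

```sql
-- Strip the 0x prefix, then decode the remaining hex string to an integer.
SELECT
    TO_NUMBER(REPLACE(gas_used, '0x', ''), 'XXXXXXXXXXXX') AS gas_used_int
FROM aurora.core.fact_traces;
```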
models/descriptions/traces_to.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_to %}

The receiving address of this trace. This is not necessarily the to address of the transaction.

{% enddocs %}

models/descriptions/traces_tx_hash.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_tx_hash %}

The transaction hash for the trace. Please note, this is not necessarily unique in this table as transactions frequently have multiple traces.

{% enddocs %}

models/descriptions/traces_type.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_type %}

The type of internal transaction. Common trace types are `CALL`, `DELEGATECALL`, and `STATICCALL`.

{% enddocs %}

models/descriptions/traces_value.md (new file, 5 lines)
@@ -0,0 +1,5 @@
{% docs traces_value %}

The amount of ETH transferred in this trace.

{% enddocs %}

@@ -1,5 +1,7 @@
{% docs tx_table_doc %}

This table contains transaction level data for the Aurora.
This table contains transaction level data for the Aurora.

Please note, Aurora contains pre-history and only launched as a public blockchain with block 37,157,757. Early blocks contain incomplete data, such as 0x0 as the block timestamp and no transactions.

{% enddocs %}
@@ -0,0 +1,6 @@
{{ config(
    materialized = 'view',
    tags = ['gha_tasks']
) }}

{{ fsc_utils.gha_task_current_status_view() }}

@@ -0,0 +1,16 @@
version: 2
models:
  - name: github_actions__current_task_status
    columns:
      - name: PIPELINE_ACTIVE
        tests:
          - dbt_expectations.expect_column_values_to_be_in_set:
              value_set:
                - TRUE
      - name: SUCCESSES
        tests:
          - dbt_expectations.expect_column_values_to_be_in_set:
              value_set:
                - 2
              config:
                severity: warn

models/github_actions/github_actions__task_history.sql (new file, 5 lines)
@@ -0,0 +1,5 @@
{{ config(
    materialized = 'view'
) }}

{{ fsc_utils.gha_task_history_view() }}

@@ -0,0 +1,5 @@
{{ config(
    materialized = 'view'
) }}

{{ fsc_utils.gha_task_performance_view() }}

models/github_actions/github_actions__task_schedule.sql (new file, 5 lines)
@@ -0,0 +1,5 @@
{{ config(
    materialized = 'view'
) }}

{{ fsc_utils.gha_task_schedule_view() }}

models/github_actions/github_actions__tasks.sql (new file, 5 lines)
@@ -0,0 +1,5 @@
{{ config(
    materialized = 'view'
) }}

{{ fsc_utils.gha_tasks_view() }}

models/gold/atlas/atlas__fact_maas.sql (new file, 13 lines)
@@ -0,0 +1,13 @@
{{ config(
    materialized = 'view',
    tags = ['atlas']
) }}

SELECT
    atlas_maa_id AS fact_maas_id,
    day,
    maa,
    inserted_timestamp,
    modified_timestamp
FROM
    {{ ref('silver__atlas_maa') }}
models/gold/atlas/atlas__fact_maas.yml (new file, 34 lines)
@@ -0,0 +1,34 @@
version: 2

models:
  - name: atlas__fact_maas
    description: |-
      Monthly Active Accounts (wallets) on Aurora, calculated over a rolling 30 day window. An active account, here, is defined as the signing of at least one transaction.

    columns:
      - name: FACT_MAAS_ID
        description: "{{ doc('id') }}"
        tests:
          - not_null
          - unique

      - name: DAY
        description: "{{ doc('active_day') }}"
        tests:
          - not_null
          - unique

      - name: MAA
        description: "{{ doc('maa')}}"
        tests:
          - not_null

      - name: INSERTED_TIMESTAMP
        description: "{{ doc('inserted_timestamp') }}"
        tests:
          - not_null

      - name: MODIFIED_TIMESTAMP
        description: "{{ doc('modified_timestamp') }}"
        tests:
          - not_null
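A rough sketch (not the underlying silver model) of the rolling 30-day count this description implies, assuming `from_address` on `fact_transactions` is the transaction signer and the table lives under `aurora.core`:

```sql
-- Hedged sketch: distinct signers over a trailing 30-day window per day.
SELECT
    d.day,
    COUNT(DISTINCT t.from_address) AS maa
FROM (
    SELECT DISTINCT DATE(block_timestamp) AS day
    FROM aurora.core.fact_transactions
) d
JOIN aurora.core.fact_transactions t
    ON DATE(t.block_timestamp) BETWEEN DATEADD('day', -29, d.day) AND d.day
GROUP BY d.day;
```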
@@ -61,6 +61,18 @@
        transactions_root,
        'uncles',
        uncles
    ) AS block_header_json
    ) AS block_header_json,
    COALESCE (
        block_id,
        {{ dbt_utils.generate_surrogate_key(['block_number']) }}
    ) AS fact_blocks_id,
    COALESCE (
        inserted_timestamp,
        _inserted_timestamp
    ) AS inserted_timestamp,
    COALESCE (
        modified_timestamp,
        _inserted_timestamp
    ) AS modified_timestamp
FROM
    {{ ref('silver__blocks') }}
    {{ ref('silver__blocks') }}
@@ -42,3 +42,9 @@ models:
      description: '{{ doc("uncle_blocks") }}'
    - name: BLOCK_HEADER_JSON
      description: '{{ doc("block_header_json") }}'
    - name: FACT_BLOCKS_ID
      description: '{{ doc("pk_id") }}'
    - name: INSERTED_TIMESTAMP
      description: "{{ doc('inserted_timestamp') }}"
    - name: MODIFIED_TIMESTAMP
      description: "{{ doc('modified_timestamp') }}"

models/gold/core/core__fact_logs.sql (new file, 35 lines)
@@ -0,0 +1,35 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['core']
) }}

SELECT
    block_number,
    block_timestamp,
    tx_hash,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    event_index,
    contract_address,
    topics,
    DATA,
    event_removed,
    tx_status,
    _log_id,
    COALESCE (
        logs_id,
        {{ dbt_utils.generate_surrogate_key(['BLOCK_NUMBER','tx_hash', '_LOG_ID']) }}
    ) AS fact_logs_id,
    COALESCE (
        inserted_timestamp,
        _inserted_timestamp
    ) AS inserted_timestamp,
    COALESCE (
        modified_timestamp,
        _inserted_timestamp
    ) AS modified_timestamp
FROM
    {{ ref('silver__logs') }}

@@ -30,3 +30,9 @@ models:
      description: "The from address at the transaction level."
    - name: ORIGIN_TO_ADDRESS
      description: "The to address at the transaction level."
    - name: FACT_LOGS_ID
      description: '{{ doc("pk_id") }}'
    - name: INSERTED_TIMESTAMP
      description: "{{ doc('inserted_timestamp') }}"
    - name: MODIFIED_TIMESTAMP
      description: "{{ doc('modified_timestamp') }}"

models/gold/core/core__fact_receipts.sql (new file, 39 lines)
@@ -0,0 +1,39 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['core']
) }}

SELECT
    block_number,
    block_timestamp,
    block_hash,
    cumulative_gas_used,
    from_address,
    to_address,
    gas_used,
    logs,
    logs_bloom,
    status,
    tx_success,
    tx_status,
    tx_hash,
    POSITION,
    TYPE,
    near_receipt_hash,
    near_transaction_hash,
    COALESCE (
        receipts_id,
        {{ dbt_utils.generate_surrogate_key(['BLOCK_NUMBER', 'TX_HASH']) }}
    ) AS fact_receipts_id,
    COALESCE (
        inserted_timestamp,
        _inserted_timestamp
    ) AS inserted_timestamp,
    COALESCE (
        modified_timestamp,
        _inserted_timestamp
    ) AS modified_timestamp
FROM
    {{ ref('silver__receipts') }}

@@ -36,3 +36,9 @@ models:
      description: "Receipt hash on NEAR blockchain"
    - name: NEAR_TRANSACTION_HASH
      description: "Transaction hash on NEAR blockchain"
    - name: FACT_RECEIPTS_ID
      description: '{{ doc("pk_id") }}'
    - name: INSERTED_TIMESTAMP
      description: "{{ doc('inserted_timestamp') }}"
    - name: MODIFIED_TIMESTAMP
      description: "{{ doc('modified_timestamp') }}"
models/gold/core/core__fact_traces.sql (new file, 44 lines)
@@ -0,0 +1,44 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['core']
) }}

SELECT
    tx_hash,
    block_number,
    block_timestamp,
    from_address,
    to_address,
    value,
    value_precise_raw,
    value_precise,
    gas,
    gas_used,
    input,
    output,
    TYPE,
    identifier,
    DATA,
    tx_status,
    sub_traces,
    trace_status,
    error_reason,
    trace_index,
    COALESCE (
        traces_id,
        {{ dbt_utils.generate_surrogate_key(
            ['tx_hash', 'trace_index']
        ) }}
    ) AS fact_traces_id,
    COALESCE(
        inserted_timestamp,
        '2000-01-01'
    ) AS inserted_timestamp,
    COALESCE(
        modified_timestamp,
        '2000-01-01'
    ) AS modified_timestamp
FROM
    {{ ref('silver__traces') }}

models/gold/core/core__fact_traces.yml (new file, 52 lines)
@@ -0,0 +1,52 @@
version: 2
models:
  - name: core__fact_traces
    description: '{{ doc("traces_table_doc") }}'

    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("traces_block_no") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("traces_blocktime") }}'
      - name: TX_HASH
        description: '{{ doc("traces_tx_hash") }}'
      - name: FROM_ADDRESS
        description: '{{ doc("traces_from") }}'
      - name: TO_ADDRESS
        description: '{{ doc("traces_to") }}'
      - name: VALUE
        description: '{{ doc("traces_value") }}'
      - name: VALUE_PRECISE_RAW
        description: '{{ doc("precise_amount_unadjusted") }}'
      - name: VALUE_PRECISE
        description: '{{ doc("precise_amount_adjusted") }}'
      - name: GAS
        description: '{{ doc("traces_gas") }}'
      - name: GAS_USED
        description: '{{ doc("traces_gas_used") }}'
      - name: INPUT
        description: '{{ doc("traces_input") }}'
      - name: OUTPUT
        description: '{{ doc("traces_output") }}'
      - name: TYPE
        description: '{{ doc("traces_type") }}'
      - name: IDENTIFIER
        description: '{{ doc("traces_identifier") }}'
      - name: DATA
        description: '{{ doc("traces_call_data") }}'
      - name: TX_STATUS
        description: '{{ doc("tx_status") }}'
      - name: SUB_TRACES
        description: '{{ doc("traces_sub") }}'
      - name: TRACE_STATUS
        description: The status of the trace, either `SUCCESS` or `FAIL`
      - name: ERROR_REASON
        description: The reason for the trace failure, if any.
      - name: TRACE_INDEX
        description: The index of the trace within the transaction.
      - name: FACT_TRACES_ID
        description: '{{ doc("pk_id") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
@@ -28,6 +28,18 @@
    r,
    s,
    v,
    tx_type
    tx_type,
    COALESCE (
        tx_id,
        {{ dbt_utils.generate_surrogate_key(['BLOCK_NUMBER', 'TX_HASH', 'POSITION']) }}
    ) AS fact_transactions_id,
    COALESCE (
        inserted_timestamp,
        _inserted_timestamp
    ) AS inserted_timestamp,
    COALESCE (
        modified_timestamp,
        _inserted_timestamp
    ) AS modified_timestamp
FROM
    {{ ref('silver__transactions') }}
    {{ ref('silver__transactions') }}

@@ -50,3 +50,9 @@ models:
      description: The s value of the transaction signature.
    - name: v
      description: The v value of the transaction signature.
    - name: FACT_TRANSACTIONS_ID
      description: '{{ doc("pk_id") }}'
    - name: INSERTED_TIMESTAMP
      description: "{{ doc('inserted_timestamp') }}"
    - name: MODIFIED_TIMESTAMP
      description: "{{ doc('modified_timestamp') }}"

@@ -1,23 +0,0 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['core']
) }}

SELECT
    block_number,
    block_timestamp,
    tx_hash,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    event_index,
    contract_address,
    topics,
    DATA,
    event_removed,
    tx_status,
    _log_id
FROM
    {{ ref('silver__logs') }}

@@ -1,27 +0,0 @@
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['core']
) }}

SELECT
    block_number,
    block_timestamp,
    block_hash,
    cumulative_gas_used,
    from_address,
    to_address,
    gas_used,
    logs,
    logs_bloom,
    status,
    tx_success,
    tx_status,
    tx_hash,
    POSITION,
    TYPE,
    near_receipt_hash,
    near_transaction_hash
FROM
    {{ ref('silver__receipts') }}
@ -7,13 +7,13 @@
SELECT
token_address,
asset_id,
A.symbol,
A.name,
symbol,
name,
platform AS blockchain,
platform_id AS blockchain_id,
provider,
A.inserted_timestamp,
A.modified_timestamp,
A.complete_provider_asset_metadata_id AS dim_asset_metadata_id
inserted_timestamp,
modified_timestamp,
complete_provider_asset_metadata_id AS dim_asset_metadata_id
FROM
{{ ref('silver__complete_provider_asset_metadata') }} A
{{ ref('silver__complete_provider_asset_metadata') }}
@ -1,11 +1,11 @@
version: 2
models:
- name: price__dim_asset_metadata
description: '{{ doc("prices_dim_asset_metadata_table_doc") }}'

columns:
- name: PROVIDER
description: '{{ doc("prices_provider")}}'
- name: ID
description: '{{ doc("prices_id_deprecation") }}'
- name: ASSET_ID
description: '{{ doc("prices_asset_id") }}'
- name: NAME
@ -18,11 +18,6 @@ models:
description: '{{ doc("prices_blockchain") }}'
- name: BLOCKCHAIN_ID
description: '{{ doc("prices_blockchain_id") }}'
- name: DECIMALS
description: '{{ doc("prices_decimals_deprecation") }}'
- name: DIM_ASSET_METADATA_ID
description: '{{ doc("pk") }}'
description: '{{ doc("pk_id") }}'
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
@ -12,9 +12,22 @@ SELECT
decimals,
blockchain,
FALSE AS is_native,
is_deprecated,
inserted_timestamp,
modified_timestamp,
complete_token_asset_metadata_id AS ez_asset_metadata_id
FROM
{{ ref('silver__complete_token_asset_metadata') }}
{{ ref('silver__complete_token_asset_metadata') }}
UNION ALL
SELECT
NULL AS token_address,
asset_id,
symbol,
NAME,
decimals,
blockchain,
TRUE AS is_native,
inserted_timestamp,
modified_timestamp,
complete_native_asset_metadata_id AS ez_asset_metadata_id
FROM
{{ ref('silver__complete_native_asset_metadata') }}
@ -1,10 +1,9 @@
version: 2
models:
- name: price__ez_asset_metadata
description: '{{ doc("prices_ez_asset_metadata_table_doc") }}'

columns:
- name: ID
description: '{{ doc("prices_id_deprecation")}}'
- name: ASSET_ID
description: '{{ doc("prices_asset_id") }}'
- name: NAME
@ -19,10 +18,8 @@ models:
description: '{{ doc("prices_decimals") }}'
- name: IS_NATIVE
description: '{{ doc("prices_is_native") }}'
- name: IS_DEPRECATED
description: '{{ doc("prices_is_deprecated") }}'
- name: EZ_ASSET_METADATA_ID
description: '{{ doc("pk") }}'
description: '{{ doc("pk_id") }}'
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
@ -14,9 +14,24 @@ SELECT
blockchain,
FALSE AS is_native,
is_imputed,
is_deprecated,
inserted_timestamp,
modified_timestamp,
complete_token_prices_id AS ez_prices_hourly_id
FROM
{{ ref('silver__complete_token_prices') }}
{{ ref('silver__complete_token_prices') }}
UNION ALL
SELECT
HOUR,
NULL AS token_address,
symbol,
NAME,
decimals,
price,
blockchain,
TRUE AS is_native,
is_imputed,
inserted_timestamp,
modified_timestamp,
complete_native_prices_id AS ez_prices_hourly_id
FROM
{{ ref('silver__complete_native_prices') }}
@ -23,7 +23,7 @@ models:
- name: IS_DEPRECATED
description: '{{ doc("prices_is_deprecated") }}'
- name: EZ_PRICES_HOURLY_ID
description: '{{ doc("pk") }}'
description: '{{ doc("pk_id") }}'
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
@ -16,4 +16,4 @@ SELECT
modified_timestamp,
complete_provider_prices_id AS fact_prices_ohlc_hourly_id
FROM
{{ ref('silver__complete_provider_prices') }}
{{ ref('silver__complete_provider_prices') }}
@ -17,7 +17,7 @@ models:
- name: CLOSE
description: '{{ doc("prices_close") }}'
- name: FACT_PRICES_OHLC_HOURLY_ID
description: '{{ doc("pk") }}'
description: '{{ doc("pk_id") }}'
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
60
models/gold/stats/stats__ez_core_metrics_hourly.sql
Normal file
@ -0,0 +1,60 @@
{{ config(
materialized = 'view',
tags = ['core'],
meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'STATS, METRICS, CORE, HOURLY',
}} }
) }}

WITH txs AS (

SELECT
block_timestamp_hour,
transaction_count,
transaction_count_success,
transaction_count_failed,
unique_from_count,
total_fees AS total_fees_native,
LAST_VALUE(
p.price ignore nulls
) over (
ORDER BY
block_timestamp_hour rows unbounded preceding
) AS imputed_close,
core_metrics_hourly_id AS ez_core_metrics_hourly_id,
s.inserted_timestamp AS inserted_timestamp,
s.modified_timestamp AS modified_timestamp
FROM
{{ ref('silver_stats__core_metrics_hourly') }}
s
LEFT JOIN {{ ref('silver__complete_token_prices') }}
p
ON s.block_timestamp_hour = p.hour
AND p.asset_id = 'aurora-near'
)
SELECT
A.block_timestamp_hour,
A.block_number_min,
A.block_number_max,
A.block_count,
b.transaction_count,
b.transaction_count_success,
b.transaction_count_failed,
b.unique_from_count,
b.total_fees_native,
ROUND(
b.total_fees_native * b.imputed_close,
2
) AS total_fees_usd,
A.core_metrics_block_hourly_id AS ez_core_metrics_hourly_id,
GREATEST(
A.inserted_timestamp,
b.inserted_timestamp
) AS inserted_timestamp,
GREATEST(
A.modified_timestamp,
b.modified_timestamp
) AS modified_timestamp
FROM
{{ ref('silver_stats__core_metrics_block_hourly') }} A
JOIN txs b
ON A.block_timestamp_hour = b.block_timestamp_hour
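A note on imputed_close in the txs CTE above: hourly price rows for 'aurora-near' can have gaps, so LAST_VALUE(p.price ignore nulls) with an unbounded-preceding frame carries the most recent observed price forward, and total_fees_usd is computed against that carried-forward value. A minimal standalone sketch of the same window pattern, with a hypothetical table name used purely for illustration:

-- Carry the last non-null price forward across hours with no price row (Snowflake syntax).
SELECT
    block_timestamp_hour,
    LAST_VALUE(price IGNORE NULLS) OVER (
        ORDER BY block_timestamp_hour
        ROWS UNBOUNDED PRECEDING
    ) AS imputed_close
FROM hourly_prices_example;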
32
models/gold/stats/stats__ez_core_metrics_hourly.yml
Normal file
@ -0,0 +1,32 @@
version: 2
models:
- name: stats__ez_core_metrics_hourly
description: '{{ doc("ez_core_metrics_hourly_table_doc") }}'

columns:
- name: BLOCK_TIMESTAMP_HOUR
description: '{{ doc("block_timestamp_hour") }}'
- name: BLOCK_NUMBER_MIN
description: '{{ doc("block_id_min") }}'
- name: BLOCK_NUMBER_MAX
description: '{{ doc("block_id_max") }}'
- name: BLOCK_COUNT
description: '{{ doc("block_count") }}'
- name: TRANSACTION_COUNT
description: '{{ doc("transaction_count") }}'
- name: TRANSACTION_COUNT_SUCCESS
description: '{{ doc("transaction_count_success") }}'
- name: TRANSACTION_COUNT_FAILED
description: '{{ doc("transaction_count_failed") }}'
- name: UNIQUE_FROM_COUNT
description: '{{ doc("unique_from_count") }}'
- name: TOTAL_FEES_NATIVE
description: '{{ doc("total_fees_native") }}'
- name: TOTAL_FEES_USD
description: '{{ doc("total_fees_usd") }}'
- name: EZ_CORE_METRICS_HOURLY_ID
description: '{{ doc("pk_id") }}'
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
79
models/silver/atlas/silver__atlas_maa.sql
Normal file
@ -0,0 +1,79 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
unique_key = 'day',
tags = ['atlas']
) }}

WITH dates AS (

SELECT
date_day AS DAY
FROM
{{ source(
'crosschain',
'dim_dates'
) }}

{% if is_incremental() %}
WHERE
date_day > (
SELECT
MAX(DAY)
FROM
{{ this }}
)
AND date_day < SYSDATE() :: DATE
{% else %}
WHERE
date_day BETWEEN '2020-07-22'
AND SYSDATE() :: DATE
{% endif %}
),
txns AS (
SELECT
block_timestamp :: DATE AS active_day,
from_address
FROM
{{ ref('silver__transactions') }}

{% if is_incremental() %}
WHERE
block_timestamp :: DATE >= (
SELECT
MAX(DAY)
FROM
{{ this }}
) - INTERVAL '30 days'
{% endif %}
),
FINAL AS (
SELECT
DAY,
COUNT(
DISTINCT from_address
) AS maa
FROM
dates d
LEFT JOIN txns t
ON t.active_day < d.day
AND t.active_day >= d.day - INTERVAL '30 day'
WHERE
DAY != CURRENT_DATE()
GROUP BY
1
ORDER BY
1 DESC
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['day']
) }} AS atlas_maa_id,
DAY,
maa,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL
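The FINAL CTE above produces the rolling metric: each calendar day from dim_dates is left-joined to every transaction whose active_day falls in the preceding 30 days, and the distinct senders are counted per day (the current, incomplete day is excluded). A hedged example of how the resulting table might be queried downstream; the database and schema in the path are assumptions, not taken from this repo:

-- Illustrative consumer query; aurora.silver.atlas_maa is an assumed relation name.
SELECT
    day,
    maa
FROM aurora.silver.atlas_maa
WHERE day >= DATEADD('day', -90, CURRENT_DATE())
ORDER BY day DESC;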
33
models/silver/atlas/silver__atlas_maa.yml
Normal file
@ -0,0 +1,33 @@
version: 2

models:
- name: silver__atlas_maa
description: |-
Monthly Active Accounts (wallets) on Aurora, calculated over a rolling 30-day window. An active account is one that has signed at least one transaction within that window.

columns:
- name: atlas_maa_id
description: "{{ doc('id') }}"
tests:
- not_null
- unique

- name: day
description: "{{ doc('active_day') }}"
tests:
- not_null
- unique

- name: maa
description: "{{ doc('maa')}}"
tests:
- not_null

- name: inserted_timestamp
description: "{{ doc('inserted_timestamp') }}"

- name: modified_timestamp
description: "{{ doc('modified_timestamp') }}"

- name: _invocation_id
description: "{{ doc('invocation_id') }}"
@ -0,0 +1,37 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'complete_native_asset_metadata_id',
tags = ['non_realtime']
) }}

SELECT
asset_id,
symbol,
NAME,
decimals,
blockchain,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_native_asset_metadata_id,
_invocation_id
FROM
{{ ref(
'bronze__complete_native_asset_metadata'
) }}

{% if is_incremental() %}
WHERE
modified_timestamp >= (
SELECT
MAX(
modified_timestamp
)
FROM
{{ this }}
)
{% endif %}
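This model follows the common dbt incremental watermark pattern: on incremental runs it selects only bronze rows whose modified_timestamp is at or beyond the current maximum in the target, and incremental_strategy = 'delete+insert' replaces any target rows that share the unique_key with the new batch. A simplified sketch of the shape of SQL the strategy ends up running (the adapter generates the real statements, typically via a temporary table; target_table and new_batch are placeholder names):

-- Simplified illustration of delete+insert; not the adapter's literal SQL.
DELETE FROM target_table
WHERE complete_native_asset_metadata_id IN (
    SELECT complete_native_asset_metadata_id FROM new_batch
);
INSERT INTO target_table
SELECT * FROM new_batch;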
@ -0,0 +1,24 @@
version: 2
models:
- name: silver__complete_native_asset_metadata
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- SYMBOL

columns:
- name: PROVIDER
tests:
- not_null
- name: SYMBOL
tests:
- not_null
- name: BLOCKCHAIN
tests:
- not_null
- name: MODIFIED_TIMESTAMP
tests:
- not_null
- name: COMPLETE_NATIVE_ASSET_METADATA_ID
tests:
- unique
40
models/silver/prices/silver__complete_native_prices.sql
Normal file
@ -0,0 +1,40 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'complete_native_prices_id',
tags = ['non_realtime']
) }}

SELECT
HOUR,
asset_id,
symbol,
NAME,
decimals,
price,
blockchain,
is_imputed,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_native_prices_id,
_invocation_id
FROM
{{ ref(
'bronze__complete_native_prices'
) }}

{% if is_incremental() %}
WHERE
modified_timestamp >= (
SELECT
MAX(
modified_timestamp
)
FROM
{{ this }}
)
{% endif %}
37
models/silver/prices/silver__complete_native_prices.yml
Normal file
@ -0,0 +1,37 @@
version: 2
models:
- name: silver__complete_native_prices
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- HOUR
- SYMBOL

columns:
- name: HOUR
tests:
- not_null
- name: SYMBOL
tests:
- not_null
- name: BLOCKCHAIN
tests:
- not_null
- name: PROVIDER
tests:
- not_null
- name: PRICE
tests:
- not_null
- name: IS_IMPUTED
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- name: MODIFIED_TIMESTAMP
tests:
- not_null
- name: COMPLETE_NATIVE_PRICES_ID
tests:
- unique
@ -23,4 +23,4 @@ models:
- not_null
- name: COMPLETE_PROVIDER_ASSET_METADATA_ID
tests:
- unique
- unique
@ -42,4 +42,4 @@ WHERE
qualify(ROW_NUMBER() over (PARTITION BY p.asset_id, recorded_hour, p.provider
ORDER BY
p.modified_timestamp DESC)) = 1
p.modified_timestamp DESC)) = 1
@ -22,4 +22,4 @@ models:
- not_null
- name: COMPLETE_PROVIDER_PRICES_ID
tests:
- unique
- unique
@ -39,4 +39,4 @@ WHERE
FROM
{{ this }}
)
{% endif %}
{% endif %}
@ -6,7 +6,7 @@ models:
combination_of_columns:
- TOKEN_ADDRESS
- BLOCKCHAIN

columns:
- name: PROVIDER
tests:
@ -25,4 +25,4 @@ models:
- not_null
- name: COMPLETE_TOKEN_ASSET_METADATA_ID
tests:
- unique
- unique
@ -43,4 +43,4 @@ WHERE
FROM
{{ this }}
)
{% endif %}
{% endif %}
@ -25,10 +25,10 @@ models:
tests:
- not_null
- name: PRICE
tests:
tests:
- not_null
- name: IS_IMPUTED
tests:
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
@ -38,4 +38,4 @@ models:
- not_null
- name: COMPLETE_TOKEN_PRICES_ID
tests:
- unique
- unique
Some files were not shown because too many files have changed in this diff