initial setup

This commit is contained in:
drethereum 2025-04-30 16:22:28 -06:00
parent acb3d0ddaa
commit eecc8a2ca1
29 changed files with 382 additions and 964 deletions

View File

@ -0,0 +1,27 @@
name: dbt_alter_all_gha_tasks
run-name: dbt_alter_all_gha_tasks
on:
workflow_dispatch:
branches:
- "main"
inputs:
task_action:
type: choice
description: Action to perform on all tasks
required: true
options:
- RESUME
- SUSPEND
default: RESUME
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_all_gha_tasks.yml@pre-release/v4-beta
with:
task_action: ${{ inputs.task_action }}
target: prod
secrets: inherit
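
For reference, this dispatch-only workflow can be triggered from the command line as well as the Actions UI. A minimal sketch, assuming the GitHub CLI (gh) is installed and authenticated, and that the file is named after the workflow:

gh workflow run dbt_alter_all_gha_tasks.yml --ref main -f task_action=SUSPEND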

View File

@ -11,7 +11,7 @@ on:
description: Name of the workflow to perform the action on, no .yml extension
required: true
task_action:
type: choice
type: choice
description: Action to perform
required: true
options:
@ -19,35 +19,14 @@ on:
- RESUME
default: SUSPEND
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_alter_gha_task.yml@pre-release/v4-beta
with:
workflow_name: |
${{ inputs.workflow_name }}
task_action: |
${{ inputs.task_action }}
environment: workflow_prod
secrets: inherit
notify-failure:
needs: [called_workflow_template]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
workflow_name: ${{ inputs.workflow_name }}
task_action: ${{ inputs.task_action }}
target: prod
secrets: inherit

View File

@ -6,47 +6,14 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Deploy New Github Actions
run: |
make deploy_new_github_action DBT_TARGET=prod
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Deploy New Github Actions
command: |
make deploy_new_gha_tasks DBT_TARGET=prod
secrets: inherit

View File

@ -5,79 +5,10 @@ on:
branches:
- "main"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: refresh ddl for datashare
run: |
cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
- name: checkout docs branch
run: |
git checkout -B docs origin/main
- name: generate dbt docs
run: dbt docs generate -t prod
- name: move files to docs directory
run: |
mkdir -p ./docs
cp target/{catalog.json,manifest.json,index.html} docs/
- name: clean up target directory
run: dbt clean
- name: check for changes
run: git status
- name: stage changed files
run: git add .
- name: commit changed files
run: |
git config user.email "abc@xyz"
git config user.name "github-actions"
git commit -am "Auto-update docs"
- name: push changes to docs
run: |
git push -f --set-upstream origin docs
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_docs_update.yml@pre-release/v4-beta
secrets: inherit

View File

@ -3,46 +3,20 @@ run-name: ${{ github.event.inputs.branch }}
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
required: true
type: string
required: true
concurrency: ${{ github.workflow }}
jobs:
prepare_vars:
runs-on: ubuntu-latest
environment:
name: ${{ inputs.environment }}
outputs:
warehouse: ${{ steps.set_outputs.outputs.warehouse }}
steps:
- name: Set warehouse output
id: set_outputs
run: |
echo "warehouse=${{ vars.WAREHOUSE }}" >> $GITHUB_OUTPUT
notify-failure:
needs: [prepare_vars]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
needs: prepare_vars
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_integration_test.yml@pre-release/v4-beta
with:
command: >
target: ${{ inputs.environment }}
command: |
dbt test --selector 'integration_tests'
environment: ${{ inputs.environment }}
warehouse: ${{ needs.prepare_vars.outputs.warehouse }}
secrets: inherit
notify-failure2:
needs: [called_workflow_template]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
secrets: inherit

View File

@ -6,8 +6,8 @@ on:
branches:
- "main"
inputs:
environment:
type: choice
target:
type: choice
description: DBT Run Environment
required: true
options:
@ -15,59 +15,29 @@ on:
- prod
default: dev
warehouse:
type: choice
type: choice
description: Snowflake warehouse
required: true
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
- DBT_MEGA
default: DBT
dbt_command:
type: string
description: "DBT Run Command"
description: 'DBT Run Command'
required: true
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
warehouse: ${{ inputs.warehouse }}
target: ${{ inputs.target }}
command_name: Run DBT Command
command: ${{ inputs.dbt_command }}
secrets: inherit

View File

@ -1,74 +0,0 @@
name: dbt_run_deployment
run-name: ${{ inputs.dbt_command }}
on:
workflow_dispatch:
branches:
- "main"
inputs:
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: string
description: "DBT Run Command"
required: true
env:
DBT_PROFILES_DIR: ./
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
dbt:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
- name: Run datashare model
run: |
cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
dbt run-operation run_query --args "{sql: call admin.datashare.sp_grant_share_permissions('${{ env.DATABASE }}')}"
- name: Store logs
uses: actions/upload-artifact@v3
with:
name: dbt-logs
path: |
logs
target
notify-failure:
needs: [dbt]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -6,77 +6,10 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs_refresh:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Dev Refresh
run: |
dbt run-operation fsc_evm.run_sp_create_prod_clone
notify-failure:
needs: [run_dbt_jobs_refresh]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
run_dbt_jobs_udfs:
runs-on: ubuntu-latest
needs: run_dbt_jobs_refresh
environment:
name: workflow_dev
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Recreate UDFs
run: |
dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
notify-failure2:
needs: [run_dbt_jobs_udfs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template_dev_refresh.yml@pre-release/v4-beta
secrets: inherit

View File

@ -5,48 +5,18 @@ on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod_2xl
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Observability Models
command: |
dbt run --threads 2 --vars '{"MAIN_OBSERV_FULL_TEST_ENABLED":True}' -m "fsc_evm,tag:observability"
secrets: inherit
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Observability Models
run: |
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "fsc_evm,tag:observability"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,19 @@
name: dbt_run_heal_models
run-name: dbt_run_heal_models
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Heal Models
command: |
dbt run -m "$PROJECT_NAME,tag:heal" --vars '{"HEAL_MODEL":True}'
secrets: inherit
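
The heal command above leaves $PROJECT_NAME as a placeholder, presumably exported by the reusable template at run time. A rough local equivalent would look like the following sketch, where the project name bob_models is hypothetical and stands in for that value:

dbt run -m "bob_models,tag:heal" --vars '{"HEAL_MODEL": true}' -t dev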

View File

@ -6,51 +6,17 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Update ABI Models
run: |
dbt run -m "fsc_evm,tag:silver_abis" "fsc_evm,tag:gold_abis"
- name: Kick off decoded logs history, if there are new ABIs from users
run: |
dbt run-operation fsc_evm.run_decoded_logs_history
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run ABI Models
command: |
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:gold,tag:abis"
command_name_2: Kick off decoded logs history, if there are new user-submitted ABIs
command_2: |
dbt run-operation fsc_evm.run_decoded_logs_history
secrets: inherit

View File

@ -0,0 +1,19 @@
name: dbt_run_scheduled_curated
run-name: dbt_run_scheduled_curated
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Curated Models
command: |
dbt run -m "$PROJECT_NAME,tag:curated" "fsc_evm,tag:curated"
secrets: inherit

View File

@ -1,52 +0,0 @@
name: dbt_run_scheduled_decoded_logs
run-name: dbt_run_scheduled_decoded_logs
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Update the silver and gold tables with the latest decoded logs
run: |
dbt run -m "fsc_evm,tag:bronze_decoded_logs" "fsc_evm,tag:silver_decoded_logs" "fsc_evm,tag:gold_decoded_logs"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,19 @@
name: dbt_run_scheduled_decoder
run-name: dbt_run_scheduled_decoder
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Decoder Models
command: |
dbt run -m "fsc_evm,tag:bronze,tag:decoded_logs" "fsc_evm,tag:silver,tag:decoded_logs" "fsc_evm,tag:gold,tag:decoded_logs"
secrets: inherit

View File

@ -6,55 +6,14 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Main Models
run: |
dbt run -m "fsc_evm,tag:bronze_core" "fsc_evm,tag:silver_core" "fsc_evm,tag:gold_core" "fsc_evm,tag:silver_prices" "fsc_evm,tag:gold_prices" "fsc_evm,tag:silver_labels" "fsc_evm,tag:gold_labels" "fsc_evm,tag:nft_core" "fsc_evm,tag:silver_confirm_blocks"
- name: Send new logs for decoding
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:bronze_decoded_logs" "fsc_evm,tag:streamline_decoded_logs_realtime" "fsc_evm,tag:streamline_decoded_logs_complete"
- name: Get new contract ABIs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:bronze_abis" "fsc_evm,tag:streamline_abis_realtime" "fsc_evm,tag:streamline_abis_complete"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Main Models
command: |
dbt run -m "fsc_evm,tag:bronze,tag:core" "fsc_evm,tag:silver,tag:core" "fsc_evm,tag:gold,tag:core" "fsc_evm,tag:silver,tag:prices" "fsc_evm,tag:gold,tag:prices" "fsc_evm,tag:silver,tag:labels" "fsc_evm,tag:gold,tag:labels" "fsc_evm,tag:gold,tag:nft" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --exclude "fsc_evm,tag:receipts_by_hash" --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
secrets: inherit

View File

@ -1,3 +1,4 @@
name: dbt_run_scheduled_scores
run-name: dbt_run_scheduled_scores
@ -6,47 +7,14 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Update scores
run: |
dbt run -m "fsc_evm,tag:scores"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Scores Models
command: |
dbt run -m "fsc_evm,tag:scores"
secrets: inherit

View File

@ -6,51 +6,15 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Chainhead Models
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" "fsc_evm,tag:streamline_core_complete_receipts" "fsc_evm,tag:streamline_core_realtime_receipts" "fsc_evm,tag:streamline_core_complete_confirm_blocks" "fsc_evm,tag:streamline_core_realtime_confirm_blocks"
- name: Run Chainhead Tests
run: |
dbt test -m "fsc_evm,tag:chainhead"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Chainhead Models
command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:chainhead" "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash"
dbt test -m "fsc_evm,tag:chainhead"
secrets: inherit

View File

@ -1,56 +0,0 @@
name: dbt_run_streamline_decoded_logs_history
run-name: dbt_run_streamline_decoded_logs_history
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Update complete table
run: |
dbt run -m "fsc_evm,tag:streamline_decoded_logs_complete"
- name: Decode historical logs
run: |
dbt run-operation fsc_evm.decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,22 @@
name: dbt_run_streamline_decoder_history
run-name: dbt_run_streamline_decoder_history
on:
workflow_dispatch:
branches:
- "main"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run Decoder Complete
command: |
dbt run -m "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete"
command_name_2: Run Streamline Decoder History
command_2: |
dbt run-operation fsc_evm.decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
secrets: inherit

View File

@ -6,47 +6,14 @@ on:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run History Models
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_history" "fsc_evm,tag:streamline_core_complete_receipts" "fsc_evm,tag:streamline_core_history_receipts"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
command_name: Run History Models
command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:history" --exclude "fsc_evm,tag:receipts_by_hash"
secrets: inherit

View File

@ -1,56 +1,23 @@
name: dbt_test_daily
name: dbt_test_daily
run-name: dbt_test_daily
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Build Daily Testing Views
run: |
dbt run -m "fsc_evm,tag:daily_test"
- name: Run Daily Tests
run: |
dbt test -m "fsc_evm,tag:daily_test"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
target: test
command_name: Build Daily Testing Views
command: |
dbt run -m "fsc_evm,tag:daily_test"
command_name_2: Run Daily Tests (all tests excluding full, recent and misc. others)
command_2: |
dbt test --exclude "fsc_evm,tag:full_test" "fsc_evm,tag:recent_test" "fsc_evm,tag:gha_tasks" livequery_models
secrets: inherit

View File

@ -5,52 +5,17 @@ on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Build Recent Testing Views
run: |
dbt run -m "fsc_evm,tag:recent_test"
- name: Run Recent Tests
run: |
dbt test -m "fsc_evm,tag:recent_test" --exclude main_package.core.bronze.token_reads
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
target: test
command_name: Run Observability & Recent Tests
command: |
dbt run -m "fsc_evm,tag:observability"
dbt test -m "fsc_evm,tag:recent_test"
secrets: inherit

View File

@ -5,52 +5,19 @@ on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Build Full Testing Views
run: |
dbt run -m "fsc_evm,tag:full_test"
- name: Run Full Tests
run: |
dbt test -m "fsc_evm,tag:full_test"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: FlipsideCrypto/fsc-evm/.github/workflows/slack_notify.yml@main
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
called_workflow_template:
uses: FlipsideCrypto/fsc-evm/.github/workflows/dbt_run_template.yml@pre-release/v4-beta
with:
target: test
command_name: Build Full Testing Views
command: |
dbt run -m "fsc_evm,tag:full_test"
command_name_2: Run Full Tests
command_2: |
dbt test -m "fsc_evm,tag:full_test"
secrets: inherit

View File

@ -1,10 +0,0 @@
workflow_name,workflow_schedule
dbt_run_streamline_chainhead,"21,51 * * * *"
dbt_run_scheduled_main,"9 * * * *"
dbt_run_dev_refresh,"41 4 * * 1"
dbt_run_scheduled_decoded_logs,"20 * * * *"
dbt_test_intraday,"37 */4 * * *"
dbt_test_daily,"57 5 * * *"
dbt_run_scheduled_abis,"31 23 * * *"
dbt_run_streamline_decoded_logs_history,"49 22 * * 6"
dbt_run_scheduled_scores,"3 5 * * *"

View File

@ -71,22 +71,30 @@ models:
+on_schema_change: "append_new_columns"
main_package:
+enabled: false # disable main_package by default, enable other packages as needed
admin:
+enabled: true
core:
+enabled: true # enable subpackages, as needed
utils:
+enabled: true
github_actions:
+enabled: true
labels:
+enabled: true
observability:
+enabled: true
prices:
+enabled: true
utils:
+enabled: true
decoder_package:
+enabled: false
abis:
+enabled: true
decoded_logs:
+enabled: true
curated_package:
+enabled: false
stats:
+enabled: true
scores_package:
+enabled: true
@ -96,11 +104,8 @@ vars:
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
UPDATE_UDFS_AND_SPS: False
UPDATE_SNOWFLAKE_TAGS: True
OBSERV_FULL_TEST: False
WAIT: 0
HEAL_MODEL: False
HEAL_MODELS: []
START_GHA_TASKS: False
#### STREAMLINE 2.0 BEGIN ####
@ -126,33 +131,4 @@ vars:
- INTERNAL_DEV
- DBT_CLOUD_BOB
#### STREAMLINE 2.0 END ####
#### FSC_EVM BEGIN ####
GLOBAL_PROD_DB_NAME: "bob"
GLOBAL_NODE_SECRET_PATH: "Vault/prod/bob/drpc/mainnet"
GLOBAL_BLOCKS_PER_HOUR: 1800
GLOBAL_WRAPPED_ASSET_ADDRESS: "0x4200000000000000000000000000000000000006"
# Please visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables
TRACES_FULL_RELOAD_START_BLOCK: 14000000
### PRICES ###
## REQUIRED
PRICES_NATIVE_SYMBOLS: "ETH"
PRICES_PROVIDER_PLATFORMS: ["bob-network"]
PRICES_NATIVE_BLOCKCHAINS: "ethereum"
### DECODER_PACKAGE VARIABLES BEGIN ###
DECODER_ABIS_BLOCK_EXPLORER_NAME: "GoBOB"
DECODER_ABIS_BLOCK_EXPLORER_URL: "https://explorer-bob-mainnet-0.t.conduit.xyz/api/v2/smart-contracts/"
DECODER_ABIS_BLOCK_EXPLORER_URL_SUFFIX: ""
DECODER_ABIS_BLOCK_EXPLORER_SECRET_PATH: ""
DECODER_ABIS_RELEVANT_CONTRACT_LIMIT: 50
DECODER_ABIS_RELEVANT_CONTRACT_COUNT: 200
DECODER_ABIS_BRONZE_API_TABLE_ENABLED: True
### DECODER_PACKAGE VARIABLES END ###
#### FSC_EVM END ####
#### STREAMLINE 2.0 END ####

makefile
View File

@ -1,48 +1,109 @@
DBT_TARGET ?= dev
deploy_streamline_functions:
rm -f package-lock.yml && dbt clean && dbt deps
dbt run -s livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET)
dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET)
RECEIPTS_BY_HASH_ENABLED ?= false
cleanup_time:
@set -e; \
rm -f package-lock.yml && dbt clean && dbt deps
deploy_streamline_tables:
rm -f package-lock.yml && dbt clean && dbt deps
ifeq ($(findstring dev,$(DBT_TARGET)),dev)
dbt run -m "fsc_evm,tag:bronze_core" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True}' -t $(DBT_TARGET)
else
dbt run -m "fsc_evm,tag:bronze_core" -t $(DBT_TARGET)
endif
dbt run -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_complete_receipts" "fsc_evm,tag:streamline_core_realtime" "fsc_evm,tag:streamline_core_realtime_receipts" "fsc_evm,tag:utils" --full-refresh -t $(DBT_TARGET)
deploy_gha_workflows_table:
@set -e; \
echo "Collecting workflow names..." ; \
WORKFLOW_VALUES="" ; \
for file in $$(find .github/workflows -name "*.yml" -type f); do \
filename=$$(basename "$$file" .yml) ; \
if [ -z "$$WORKFLOW_VALUES" ]; then \
WORKFLOW_VALUES="('$$filename')" ; \
else \
WORKFLOW_VALUES="$$WORKFLOW_VALUES,('$$filename')" ; \
fi ; \
done ; \
echo "Found workflows: $$WORKFLOW_VALUES" ; \
dbt run-operation create_workflow_table --args "{\"workflow_values\": \"$$WORKFLOW_VALUES\"}" -t $(DBT_TARGET)
deploy_streamline_requests:
rm -f package-lock.yml && dbt clean && dbt deps
dbt run -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" "fsc_evm,tag:streamline_core_realtime_receipts" "fsc_evm,tag:streamline_core_complete_receipts" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET)
deploy_gha_tasks:
@set -e; \
make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)
deploy_github_actions:
dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET)
dbt seed -s github_actions__workflows -t $(DBT_TARGET)
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET)
ifeq ($(findstring dev,$(DBT_TARGET)),dev)
dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":False}' -t $(DBT_TARGET)
else
dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}' -t $(DBT_TARGET)
endif
deploy_new_gha_tasks:
@set -e; \
make deploy_gha_workflows_table DBT_TARGET=$(DBT_TARGET); \
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_gha_tasks --vars '{"RESUME_GHA_TASKS":True}' -t $(DBT_TARGET)
deploy_new_github_action:
dbt run-operation fsc_evm.drop_github_actions_schema -t $(DBT_TARGET)
dbt seed -s github_actions__workflows -t $(DBT_TARGET)
dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET)
ifeq ($(findstring dev,$(DBT_TARGET)),dev)
dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":False}' -t $(DBT_TARGET)
else
dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}' -t $(DBT_TARGET)
endif
deploy_livequery:
@set -e; \
dbt run-operation fsc_evm.drop_livequery_schemas --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
kickoff_backfill:
rm -f package-lock.yml && dbt clean && dbt deps
dbt run -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_history" "fsc_evm,tag:streamline_core_history_receipts" "fsc_evm,tag:streamline_core_complete_receipts" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET)
deploy_chain_phase_1:
@set -e; \
dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_evm.call_sample_rpc_node -t $(DBT_TARGET); \
if [ "$(DBT_TARGET)" != "prod" ]; then \
if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
fi; \
else \
if [ "$(RECEIPTS_BY_HASH_ENABLED)" = "true" ]; then \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_1" --exclude "fsc_evm,tag:receipts_by_hash" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_evm,tag:chainhead"; \
dbt run -m "fsc_evm,tag:streamline,tag:core,tag:complete" "fsc_evm,tag:streamline,tag:core,tag:realtime" --exclude "fsc_evm,tag:receipts_by_hash" "fsc_evm,tag:confirm_blocks" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
fi; \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_2"
.PHONY: deploy_streamline_functions deploy_streamline_tables deploy_streamline_requests deploy_github_actions cleanup_time deploy_new_github_action kickoff_backfill
deploy_chain_phase_2:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_2" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:streamline,tag:abis,tag:realtime" "fsc_evm,tag:streamline,tag:abis,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_3"
deploy_chain_phase_3:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_2" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_2" -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_3" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:silver,tag:abis" "fsc_evm,tag:streamline,tag:decoded_logs,tag:realtime" "fsc_evm,tag:streamline,tag:decoded_logs,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_4"
deploy_chain_phase_4:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_evm,tag:phase_3" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
else \
dbt run -m "fsc_evm,tag:phase_3" -t $(DBT_TARGET); \
dbt run -m "fsc_evm,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
make deploy_gha_tasks DBT_TARGET=$(DBT_TARGET); \
fi
.PHONY: cleanup_time deploy_gha_workflows_table deploy_gha_tasks deploy_new_gha_tasks deploy_livequery deploy_chain_phase_1 deploy_chain_phase_2 deploy_chain_phase_3 deploy_chain_phase_4
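
A typical end-to-end run of the phased deploy, overriding the defaults declared at the top of the Makefile (DBT_TARGET, RECEIPTS_BY_HASH_ENABLED), might look like the following sketch; the pauses mirror the "wait ~10 minutes" hints echoed by each phase:

make deploy_chain_phase_1 DBT_TARGET=dev RECEIPTS_BY_HASH_ENABLED=false
# wait ~10 minutes
make deploy_chain_phase_2 DBT_TARGET=dev
# wait ~10 minutes
make deploy_chain_phase_3 DBT_TARGET=dev
# wait ~10 minutes
make deploy_chain_phase_4 DBT_TARGET=dev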

View File

@ -20,6 +20,8 @@ There is more information on how to use dbt docs in the last section of this doc
**Dimension Tables:**
- [dim_labels](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.core__dim_labels)
- [dim_contracts](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.core__dim_contracts)
- [dim_contract_abis](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.core__dim_contract_abis)
**Fact Tables:**
- [fact_blocks](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.core__fact_blocks)
@ -27,12 +29,20 @@ There is more information on how to use dbt docs in the last section of this doc
- [fact_transactions](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.core__fact_transactions)
- [fact_traces](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.core__fact_traces)
**Convenience Tables:**
- [ez_decoded_event_logs](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.core__ez_decoded_event_logs)
- [ez_native_transfers](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.core__ez_native_transfers)
- [ez_token_transfers](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.core__ez_token_transfers)
### Price Tables (bob.price)
- [dim_asset_metadata](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.price__ez_prices_hourly)
### NFT Tables (bob.nft)
- [ez_nft_transfers](https://flipsidecrypto.github.io/bob-models/#!/model/model.fsc_evm.nft__ez_nft_transfers)
## **Helpful User-Defined Functions (UDFs)**
UDFs are custom functions built by the Flipside team that can be used in your queries to make your life easier.

View File

@ -1,25 +1,22 @@
version: 2
sources:
- name: github_actions
database: "{{ target.database }}"
schema: github_actions
tables:
- name: workflows
- name: bronze_streamline
database: streamline
schema: >-
{{ var('GLOBAL_PROD_DB_NAME') ~ ('_dev' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else '') }}
schema: |
{{ target.database.upper() | replace('_DEV', '') ~ '_DEV' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else target.database.upper() | replace('_DEV', '') }}
tables:
- name: blocks
- name: transactions
- name: >-
{{ 'receipts_by_hash' if var("GLOBAL_USES_RECEIPTS_BY_HASH", False) else 'receipts' }}
- name: receipts
- name: receipts_by_hash
- name: traces
- name: confirm_blocks
- name: decoded_logs
- name: contract_abis
- name: crosschain_silver
database: "{{ 'crosschain' if target.database.upper() == var('GLOBAL_PROD_DB_NAME').upper() else 'crosschain_dev' }}"
database: >-
{{ 'CROSSCHAIN_DEV' if '_DEV' in target.database.upper() else 'CROSSCHAIN' }}
schema: silver
tables:
- name: labels_combined
@ -29,24 +26,37 @@ sources:
- name: complete_provider_prices
- name: complete_token_asset_metadata
- name: complete_token_prices
- name: bronze_api
database: "{{ target.database }}"
schema: bronze_api
tables:
- name: contract_abis
- name: crosschain_public
database: crosschain
schema: bronze_public
tables:
- name: user_abis
- name: silver
- name: complete_streamline
database: "{{ target.database }}"
schema: streamline
tables:
- name: complete_contract_abis
- name: github_actions
database: "{{ target.database }}"
schema: github_actions
tables:
- name: workflows
- name: abis_silver
database: "{{ target.database }}"
schema: silver
tables:
- name: verified_abis
- name: bronze_abi
- name: complete_event_abis
- name: fsc_evm_admin
database: >-
{{ var('GLOBAL_PROD_DB_NAME') ~ ('_dev' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else '') }}
schema: streamline
{{ 'FSC_EVM_DEV' if '_DEV' in target.database.upper() else 'FSC_EVM' }}
schema: admin
tables:
- name: complete_contract_abis
- name: _master_keys
- name: rpc_node_logs
- name: logs_temp
database: "{{ target.database }}"
schema: silver
tables:
- name: logs
- name: decoded_logs
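
The bronze_streamline schema above is driven by the STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES var (the same var used in the Makefile): when it is true, the schema resolves to the _DEV variant of the target database name; otherwise it resolves to the base name. A minimal sketch of flipping it on a dev build, reusing a selector that appears elsewhere in this commit:

dbt run -m "fsc_evm,tag:bronze,tag:core" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES": true}' -t dev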

View File

@ -1,3 +1,3 @@
packages:
- git: https://github.com/FlipsideCrypto/fsc-evm.git
revision: v3.30.0
revision: v4.0.0-beta.37