chainhead

Austin 2025-01-31 14:38:32 -05:00
parent 10bbc2fb3a
commit ccb2cf51f6
43 changed files with 997 additions and 501 deletions

View File

@@ -1,46 +0,0 @@
name: dbt_alter_gha_task
run-name: dbt_alter_gha_task
on:
workflow_dispatch:
branches:
- "main"
inputs:
workflow_name:
type: string
description: Name of the workflow to perform the action on (no .yml extension)
required: true
task_action:
type: choice
description: Action to perform
required: true
options:
- SUSPEND
- RESUME
default: SUSPEND
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
with:
workflow_name: |
${{ inputs.workflow_name }}
task_action: |
${{ inputs.task_action }}
environment: workflow_prod
secrets: inherit

View File

@@ -1,48 +0,0 @@
name: dbt_run_full_observability
run-name: dbt_run_full_observability
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod_2xl
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Observability Models
run: |
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "fsc_evm,tag:observability"

View File

@@ -1,45 +0,0 @@
name: dbt_run_heal_models
run-name: dbt_run_heal_models
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Heal Models
run: |
dbt run -m "<evm_chain>_models,tag:heal" --vars '{"HEAL_MODEL":True}'

View File

@@ -1,51 +0,0 @@
name: dbt_run_operation_reorg
run-name: dbt_run_operation_reorg
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: List reorg models
id: list_models
run: |
reorg_model_list=$(dbt list --select "<evm_chain>_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT
- name: Execute block_reorg macro
run: |
dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '12'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log

View File

@@ -1,45 +0,0 @@
name: dbt_run_scheduled_abis
run-name: dbt_run_scheduled_abis
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run ABI Models
run: |
dbt run -m "fsc_evm,tag:silver_abis" "fsc_evm,tag:gold_abis"

View File

@@ -1,45 +0,0 @@
name: dbt_run_scheduled_curated
run-name: dbt_run_scheduled_curated
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Curated Models
run: |
dbt run -m "<evm_chain>_models,tag:curated" "fsc_evm,tag:curated"

View File

@@ -1,49 +0,0 @@
name: dbt_run_scheduled_main
run-name: dbt_run_scheduled_main
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Main Models
run: |
dbt run -m "fsc_evm,tag:silver_core" "fsc_evm,tag:gold_core" "fsc_evm,tag:silver_prices" "fsc_evm,tag:gold_prices" "fsc_evm,tag:silver_labels" "fsc_evm,tag:gold_labels"
- name: Run Streamline Models
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_decoded_logs_realtime" "fsc_evm,tag:streamline_decoded_logs_complete"

View File

@@ -42,8 +42,8 @@ jobs:
- name: Run Chainhead Models
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" "fsc_evm,tag:streamline_core_complete_receipts" "fsc_evm,tag:streamline_core_realtime_receipts"
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "monad_models,tag:streamline_testnet_complete" "monad_models,tag:streamline_testnet_realtime"
- name: Run Chainhead Tests
run: |
dbt test -m "fsc_evm,tag:chainhead"
dbt test -m "monad_models,tag:chainhead"

View File

@@ -1,44 +0,0 @@
name: dbt_run_streamline_decoder
run-name: dbt_run_streamline_decoder
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "fsc_evm,tag:decoded_logs"

View File

@@ -1,44 +0,0 @@
name: dbt_run_streamline_decoder_history
run-name: dbt_run_streamline_decoder_history
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "fsc_evm,tag:streamline_decoded_logs_complete" "fsc_evm,tag:streamline_decoded_logs_history"

View File

@@ -42,4 +42,4 @@ jobs:
- name: Run History Models
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_history" "fsc_evm,tag:streamline_core_complete_receipts" "fsc_evm,tag:streamline_core_history_receipts"
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "monad_models,tag:streamline_testnet_complete" "monad_models,tag:streamline_testnet_history"

View File

@@ -1,27 +0,0 @@
name: dbt_test_tasks
run-name: dbt_test_tasks
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_test_tasks.yml@AN-4374/upgrade-dbt-1.7
secrets: inherit

View File

@@ -1,12 +1,12 @@
## Repo Set Up
1. Create a new repository from the [evm-models-template](https://github.com/FlipsideCrypto/evm-models-template)
2. Update all references to `<evm_chain>` to the new chain name, in lowercase, using find and replace
2. Update all references to `monad` to the new chain name, in lowercase, using find and replace
3. Update the fsc-evm package version in `packages.yml` to the latest version
4. Set up the rest of the dbt project, where applicable, including but not limited to:
- `dbt_project.yml` (enable/disable packages, vars, etc.)
- `.github/workflows` (update tags, etc.)
- `github_actions__workflows.csv` (update schedule, workflows, etc.)
- `overview.md` (update `<evm_chain>`, table references, docs etc.)
- `overview.md` (update `monad`, table references, docs etc.)
- `sources.yml` (update schemas, tables etc.)
- `requirements.txt` (update dependencies)
- other files where applicable

View File

@@ -1,3 +1,3 @@
workflow_name,workflow_schedule
dbt_run_streamline_chainhead,"<insert-cron-schedule>"
dbt_run_scheduled_main,"<insert-cron-schedule>"
dbt_run_streamline_chainhead,"6,36 * * * *"
dbt_run_dev_refresh,"8 5 * * 1"

View File

@@ -1,17 +1,17 @@
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "<evm_chain>_models" # replace with the name of the chain
name: "monad_models" # replace with the name of the chain
version: "1.0.0"
config-version: 2
# This setting configures which "profile" dbt uses for this project.
profile: "<evm_chain>" # replace with the name of the chain
profile: "monad" # replace with the name of the chain
# These configurations specify where dbt should look for different types of files.
# The `source-paths` config, for example, states that models in this project can be
# found in the "models/" directory. You probably won't need to change these!
model-paths: ["models"]
model-paths: ["models"]
analysis-paths: ["analysis"]
test-paths: ["tests"]
seed-paths: ["data"]
@@ -26,10 +26,8 @@ clean-targets: # directories to be removed by `dbt clean`
- "dbt_packages"
tests:
<evm_chain>_models: # replace with the name of the chain
monad_models: # replace with the name of the chain
+store_failures: true # all tests
fsc_evm:
+store_failures: true
on-run-start:
- "{{ fsc_evm.create_sps() }}"
@@ -41,7 +39,7 @@ on-run-end:
dispatch:
- macro_namespace: dbt
search_order:
- <evm_chain>-models
- monad-models
- dbt_snowflake_query_tags
- dbt
@@ -53,33 +51,22 @@ query-comment:
# Full documentation: https://docs.getdbt.com/docs/configuring-models
models:
<evm_chain>_models: # replace with the name of the chain
monad_models: # replace with the name of the chain
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
livequery_models:
+enabled: true # Keep livequery models enabled since you need them
+materialized: ephemeral
fsc_evm:
+enabled: false # disable fsc_evm package by default
+enabled: false # keep fsc_evm package disabled
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
main_package:
+enabled: false # disable main_package by default, enable other packages as needed
core:
+enabled: true # enable subpackages, as needed
github_actions:
+enabled: true
labels:
+enabled: true
prices:
+enabled: true
utils:
+enabled: true
vars:
"dbt_date:time_zone": GMT
@@ -103,24 +90,33 @@ vars:
config:
# The keys correspond to dbt profiles and are case sensitive
dev:
API_INTEGRATION:
EXTERNAL_FUNCTION_URI:
API_INTEGRATION: AWS_MONAD_API_STG_V2
EXTERNAL_FUNCTION_URI: byqploe1p2.execute-api.us-east-1.amazonaws.com/stg/
ROLES:
- AWS_LAMBDA_<EVM_CHAIN>_API # replace with the name of the chain
- AWS_LAMBDA_MONAD_API # replace with the name of the chain
- INTERNAL_DEV
prod:
API_INTEGRATION:
EXTERNAL_FUNCTION_URI:
API_INTEGRATION: AWS_MONAD_API_PROD_V2
EXTERNAL_FUNCTION_URI: t651otge99.execute-api.us-east-1.amazonaws.com/prod/
ROLES:
- AWS_LAMBDA_<EVM_CHAIN>_API # replace with the name of the chain
- AWS_LAMBDA_MONAD_API # replace with the name of the chain
- INTERNAL_DEV
- DBT_CLOUD_<EVM_CHAIN> # replace with the name of the chain
- DBT_CLOUD_MONAD # replace with the name of the chain
#### STREAMLINE 2.0 END ####
#### FSC_EVM BEGIN ####
### GLOBAL VARIABLES BEGIN ###
## REQUIRED
GLOBAL_PROD_DB_NAME: "monad"
GLOBAL_NODE_SECRET_PATH: "Vault/prod/monad/quicknode/testnet"
GLOBAL_BLOCKS_PER_HOUR: 3600
### GLOBAL VARIABLES END ###
# Please visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables
#### FSC_EVM END ####

View File

@@ -3,7 +3,7 @@
# Welcome to the Flipside Crypto Core Models Documentation!
## **What does this documentation cover?**
The documentation included here details the design of the Core tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/<evm_chain>-models)
The documentation included here details the design of the Core tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/monad-models)
## **How do I use these docs?**
The easiest way to navigate this documentation is to use the Quick Links below. These links will take you to the documentation for each table, which contains a description, a list of the columns, and other helpful information.
@@ -16,22 +16,22 @@ There is more information on how to use dbt docs in the last section of this doc
**Click on the links below to jump to the documentation for each schema.**
### Core Tables (<evm_chain>.core)
### Core Tables (monad.core)
**Dimension Tables:**
- [dim_labels](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.core__dim_labels)
- [dim_labels](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.core__dim_labels)
**Fact Tables:**
- [fact_blocks](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.core__fact_blocks)
- [fact_event_logs](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.core__fact_event_logs)
- [fact_transactions](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.core__fact_transactions)
- [fact_traces](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.core__fact_traces)
- [fact_blocks](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.core__fact_blocks)
- [fact_event_logs](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.core__fact_event_logs)
- [fact_transactions](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.core__fact_transactions)
- [fact_traces](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.core__fact_traces)
### Price Tables (<evm_chain>.price)
- [dim_asset_metadata](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.price__ez_prices_hourly)
### Price Tables (monad.price)
- [dim_asset_metadata](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.price__ez_prices_hourly)
## **Helpful User-Defined Functions (UDFs)**
@@ -75,7 +75,7 @@ Note that you can also right-click on models to interactively filter and explore
- [Flipside](https://flipsidecrypto.xyz/)
- [Data Studio](https://flipsidecrypto.xyz/studio)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/<evm_chain>-models)
- [Github](https://github.com/FlipsideCrypto/monad-models)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)
{% enddocs %}

View File

@@ -11,13 +11,12 @@ sources:
schema: >-
{{ var('GLOBAL_PROD_DB_NAME') ~ ('_dev' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else '') }}
tables:
- name: blocks
- name: transactions
- name: >-
{{ 'receipts_by_hash' if var("GLOBAL_USES_RECEIPTS_BY_HASH", False) else 'receipts' }}
- name: traces
- name: confirm_blocks
- name: decoded_logs
- name: testnet_blocks
- name: testnet_transactions
- name: testnet_receipts
- name: testnet_traces
- name: testnet_confirm_blocks
- name: testnet_decoded_logs
- name: crosschain_silver
database: "{{ 'crosschain' if target.database.upper() == var('GLOBAL_PROD_DB_NAME').upper() else 'crosschain_dev' }}"
schema: silver

View File

@@ -0,0 +1,41 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", "testnet_blocks") }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp,
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
FROM
{{ source(
"bronze_streamline",
"testnet_blocks"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL
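For reference, the partition key in these bronze views is parsed out of the external-table file path. A minimal sketch of the parsing with a hypothetical file name (the actual stage layout may differ):

-- Hypothetical path: SPLIT_PART(..., '/', 4) takes the fourth path segment,
-- and SPLIT_PART(..., '_', 1) keeps its numeric prefix before the first underscore.
SELECT CAST(
    SPLIT_PART(SPLIT_PART('monad/testnet_blocks/records/2025010100_0.json.gz', '/', 4), '_', 1)
    AS INTEGER
) AS partition_key; -- 2025010100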

View File

@@ -0,0 +1,42 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "testnet_blocks") }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp,
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
FROM
{{ source(
"bronze_streamline",
"testnet_blocks"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL

View File

@@ -0,0 +1,41 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", "testnet_confirm_blocks") }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp,
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
FROM
{{ source(
"bronze_streamline",
"testnet_confirm_blocks"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL

View File

@@ -0,0 +1,42 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "testnet_confirm_blocks") }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp,
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
FROM
{{ source(
"bronze_streamline",
"testnet_confirm_blocks"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL

View File

@@ -0,0 +1,41 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", "testnet_receipts") }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp,
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
FROM
{{ source(
"bronze_streamline",
"testnet_receipts"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL

View File

@@ -0,0 +1,42 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "testnet_receipts") }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp,
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
FROM
{{ source(
"bronze_streamline",
"testnet_receipts"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL

View File

@@ -0,0 +1,41 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", "testnet_traces") }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp,
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
FROM
{{ source(
"bronze_streamline",
"testnet_traces"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL

View File

@@ -0,0 +1,42 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "testnet_traces") }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp,
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
FROM
{{ source(
"bronze_streamline",
"testnet_traces"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL

View File

@@ -0,0 +1,41 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", "testnet_transactions") }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp,
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
FROM
{{ source(
"bronze_streamline",
"testnet_transactions"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL

View File

@@ -0,0 +1,42 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "testnet_transactions") }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp,
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
FROM
{{ source(
"bronze_streamline",
"testnet_transactions"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL

View File

@@ -0,0 +1,33 @@
-- depends_on: {{ ref('bronze_testnet__blocks') }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_testnet_complete']
) }}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_blocks_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze_testnet__blocks') }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze_testnet__blocks_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1
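The QUALIFY clause above keeps only the most recent file registration per block. A standalone sketch of the same dedupe with illustrative values only:

-- Keeps the latest row per block_number; returns (1, 2025-01-02) and (2, 2025-01-01).
SELECT block_number, _inserted_timestamp :: TIMESTAMP AS _inserted_timestamp
FROM (VALUES (1, '2025-01-01'), (1, '2025-01-02'), (2, '2025-01-01')) AS v (block_number, _inserted_timestamp)
QUALIFY ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC) = 1;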

View File

@@ -0,0 +1,33 @@
-- depends_on: {{ ref('bronze_testnet__receipts') }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_testnet_complete']
) }}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_receipts_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze_testnet__receipts') }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze_testnet__receipts_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@@ -0,0 +1,33 @@
-- depends_on: {{ ref('bronze_testnet__traces') }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_testnet_complete']
) }}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_traces_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze_testnet__traces') }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze_testnet__traces_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@@ -0,0 +1,33 @@
-- depends_on: {{ ref('bronze_testnet__transactions') }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_testnet_complete']
) }}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_transactions_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze_testnet__transactions') }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze_testnet__transactions_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@@ -0,0 +1,54 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"testnet_blocks_transactions",
"sql_limit" :"2000000",
"producer_batch_size" :"1800",
"worker_batch_size" :"1800",
"sql_source" :"{{this.identifier}}",
"exploded_key": tojson(["result", "result.transactions"]) }
),
tags = ['streamline_testnet_history']
) }}
WITH to_do AS (
SELECT block_number
FROM {{ ref("streamline__testnet_blocks") }}
EXCEPT
SELECT block_number
FROM {{ ref("streamline__testnet_blocks_complete") }} b
INNER JOIN {{ ref("streamline__testnet_transactions_complete") }} t USING(block_number)
),
ready_blocks AS (
SELECT block_number
FROM to_do
)
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'streamline'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', 'eth_getBlockByNumber',
'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
ORDER BY block_number desc
LIMIT
2000000
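The to_do CTE in these streamline models is a set-difference gap fill: every block height emitted by streamline__testnet_blocks, minus the heights already recorded in the _complete tables. A minimal sketch of the EXCEPT semantics with literal values:

-- Block 3 has been generated but not yet ingested, so it is the one returned.
SELECT block_number FROM (VALUES (1), (2), (3)) AS generated (block_number)
EXCEPT
SELECT block_number FROM (VALUES (1), (2)) AS ingested (block_number); -- 3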

View File

@@ -0,0 +1,52 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"testnet_receipts",
"sql_limit" :"2000000",
"producer_batch_size" :"1800",
"worker_batch_size" :"1800",
"sql_source" :"{{this.identifier}}" }
),
tags = ['streamline_testnet_history']
) }}
WITH to_do AS (
SELECT block_number
FROM {{ ref("streamline__testnet_blocks") }}
EXCEPT
SELECT block_number
FROM {{ ref("streamline__testnet_receipts_complete") }}
),
ready_blocks AS (
SELECT block_number
FROM to_do
)
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'streamline'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', 'eth_getBlockReceipts',
'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
ORDER BY block_number desc
LIMIT
2000000

View File

@@ -0,0 +1,53 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"testnet_traces",
"sql_limit" :"2000000",
"producer_batch_size" :"1800",
"worker_batch_size" :"1800",
"sql_source" :"{{this.identifier}}",
"exploded_key": tojson(["result"]) }
),
tags = ['streamline_testnet_history']
) }}
WITH to_do AS (
SELECT block_number
FROM {{ ref("streamline__testnet_blocks") }}
EXCEPT
SELECT block_number
FROM {{ ref("streamline__testnet_traces_complete") }}
),
ready_blocks AS (
SELECT block_number
FROM to_do
)
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'streamline'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', 'debug_traceBlockByNumber',
'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
ORDER BY block_number desc
LIMIT
2000000

View File

@@ -0,0 +1,54 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"testnet_blocks_transactions",
"sql_limit" :"7200",
"producer_batch_size" :"3600",
"worker_batch_size" :"1800",
"sql_source" :"{{this.identifier}}",
"exploded_key": tojson(["result", "result.transactions"]) }
),
tags = ['streamline_testnet_realtime']
) }}
WITH to_do AS (
SELECT block_number
FROM {{ ref("streamline__testnet_blocks") }}
EXCEPT
SELECT block_number
FROM {{ ref("streamline__testnet_blocks_complete") }} b
INNER JOIN {{ ref("streamline__testnet_transactions_complete") }} t USING(block_number)
),
ready_blocks AS (
SELECT block_number
FROM to_do
)
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'streamline'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', 'eth_getBlockByNumber',
'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
ORDER BY block_number desc
LIMIT
7200

View File

@@ -0,0 +1,52 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"testnet_receipts",
"sql_limit" :"7200",
"producer_batch_size" :"1800",
"worker_batch_size" :"1800",
"sql_source" :"{{this.identifier}}" }
),
tags = ['streamline_testnet_realtime']
) }}
WITH to_do AS (
SELECT block_number
FROM {{ ref("streamline__testnet_blocks") }}
EXCEPT
SELECT block_number
FROM {{ ref("streamline__testnet_receipts_complete") }}
),
ready_blocks AS (
SELECT block_number
FROM to_do
)
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'streamline'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', 'eth_getBlockReceipts',
'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
ORDER BY block_number desc
LIMIT
7200

View File

@@ -0,0 +1,53 @@
{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"testnet_traces",
"sql_limit" :"7200",
"producer_batch_size" :"1800",
"worker_batch_size" :"1800",
"sql_source" :"{{this.identifier}}",
"exploded_key": tojson(["result"]) }
),
tags = ['streamline_testnet_realtime']
) }}
WITH to_do AS (
SELECT block_number
FROM {{ ref("streamline__testnet_blocks") }}
EXCEPT
SELECT block_number
FROM {{ ref("streamline__testnet_traces_complete") }}
),
ready_blocks AS (
SELECT block_number
FROM to_do
)
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'streamline'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', 'debug_traceBlockByNumber',
'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
ORDER BY block_number desc
LIMIT
7200

View File

@@ -0,0 +1,28 @@
{{ config (
materialized = 'table',
tags = ['streamline_testnet_complete','chainhead']
) }}
SELECT
live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'LiveQuery'
),
OBJECT_CONSTRUCT(
'id',
0,
'jsonrpc',
'2.0',
'method',
'eth_blockNumber',
'params',
[]
),
'{{ var('GLOBAL_NODE_SECRET_PATH') }}'
) AS resp,
utils.udf_hex_to_int(
resp :data :result :: STRING
) AS block_number
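eth_blockNumber returns the chain head as a hex quantity (e.g. 0x1b4), which utils.udf_hex_to_int converts to a decimal integer. A rough plain-Snowflake equivalent, as a sketch only (not the UDF's actual implementation):

-- '0x1b4' -> 436: strip the 0x prefix, then parse the remainder as hex.
SELECT TO_NUMBER(UPPER(LTRIM('0x1b4', '0x')), 'XXXXXXXXXX') AS block_number; -- 436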

View File

@@ -0,0 +1,9 @@
version: 2
models:
- name: streamline__get_testnet_chainhead
description: "This model is used to get the chainhead from the blockchain."
columns:
- name: BLOCK_NUMBER
tests:
- not_null

View File

@@ -0,0 +1,24 @@
{{ config (
materialized = "view",
tags = ['streamline_testnet_complete']
) }}
SELECT
_id,
(
({{ var('GLOBAL_BLOCKS_PER_HOUR',0) }} / 60) * {{ var('GLOBAL_CHAINHEAD_DELAY',3) }}
) :: INT AS block_number_delay, --minute-based block delay
(_id - block_number_delay) :: INT AS block_number,
utils.udf_int_to_hex(block_number) AS block_number_hex
FROM
{{ ref('utils__number_sequence') }}
WHERE
_id <= (
SELECT
COALESCE(
block_number,
0
)
FROM
{{ ref("streamline__get_testnet_chainhead") }}
)
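Given the vars set in dbt_project.yml above (GLOBAL_BLOCKS_PER_HOUR: 3600) and the default GLOBAL_CHAINHEAD_DELAY of 3, the delay arithmetic works out to a lag of 180 blocks, roughly three minutes at one block per second:

-- (blocks per hour / 60) * delay minutes = blocks to trail the chain head by.
SELECT (3600 / 60) * 3 AS block_number_delay; -- 180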

View File

@@ -0,0 +1,19 @@
{{ config(
materialized = 'incremental',
cluster_by = 'round(_id,-3)',
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(_id)",
full_refresh = false,
tags = ['utils']
) }}
SELECT
ROW_NUMBER() over (
ORDER BY
SEQ4()
) - 1 :: INT AS _id
FROM
TABLE(GENERATOR(rowcount => 1000000000))
WHERE 1=1
{% if is_incremental() %}
AND 1=0
{% endif %}
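This utility table is seeded exactly once: full_refresh = false blocks rebuilds, and on incremental runs the 1=0 predicate turns the insert into a no-op, leaving the original billion-row sequence in place. A small sketch of the generator itself:

-- ROW_NUMBER over SEQ4 yields a gapless 0-based sequence (SEQ4 alone may contain gaps).
SELECT ROW_NUMBER() OVER (ORDER BY SEQ4()) - 1 AS _id
FROM TABLE(GENERATOR(ROWCOUNT => 5)); -- 0, 1, 2, 3, 4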

View File

@@ -1,3 +1,3 @@
packages:
- git: https://github.com/FlipsideCrypto/fsc-evm.git
revision: <insert-version-v3+>
revision: v3.21.1

View File

@@ -1,4 +1,4 @@
<evm_chain>:
monad:
target: prod
outputs:
dev: