diff --git a/.github/workflows/dbt_alter_gha_task.yml b/.github/workflows/dbt_alter_gha_task.yml
deleted file mode 100644
index 8f00313..0000000
--- a/.github/workflows/dbt_alter_gha_task.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-name: dbt_alter_gha_task
-run-name: dbt_alter_gha_task
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-    inputs:
-      workflow_name:
-        type: string
-        description: Name of the workflow to perform the action on, no .yml extension
-        required: true
-      task_action:
-        type: choice
-        description: Action to perform
-        required: true
-        options:
-          - SUSPEND
-          - RESUME
-        default: SUSPEND
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  called_workflow_template:
-    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
-    with:
-      workflow_name: |
-        ${{ inputs.workflow_name }}
-      task_action: |
-        ${{ inputs.task_action }}
-    environment: workflow_prod
-    secrets: inherit
\ No newline at end of file
diff --git a/.github/workflows/dbt_run_full_observability.yml b/.github/workflows/dbt_run_full_observability.yml
deleted file mode 100644
index dd4e8b7..0000000
--- a/.github/workflows/dbt_run_full_observability.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-name: dbt_run_full_observability
-run-name: dbt_run_full_observability
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_prod_2xl
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-          cache: "pip"
-
-      - name: install dependencies
-        run: |
-          pip install -r requirements.txt
-          dbt deps
-
-      - name: Run Observability Models
-        run: |
-          dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "fsc_evm,tag:observability"
-
-
-
diff --git a/.github/workflows/dbt_run_heal_models.yml b/.github/workflows/dbt_run_heal_models.yml
deleted file mode 100644
index 834347b..0000000
--- a/.github/workflows/dbt_run_heal_models.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-name: dbt_run_heal_models
-run-name: dbt_run_heal_models
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_prod
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-          cache: "pip"
-
-      - name: install dependencies
-        run: |
-          pip install -r requirements.txt
-          dbt deps
-
-      - name: Run Heal Models
-        run: |
-          dbt run -m "_models,tag:heal" --vars '{"HEAL_MODEL":True}'
\ No newline at end of file
diff --git a/.github/workflows/dbt_run_operation_reorg.yml b/.github/workflows/dbt_run_operation_reorg.yml
deleted file mode 100644
index 8f53096..0000000
--- a/.github/workflows/dbt_run_operation_reorg.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-name: dbt_run_operation_reorg
-run-name: dbt_run_operation_reorg
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_prod
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-          cache: "pip"
-
-      - name: install dependencies
-        run: |
-          pip install -r requirements.txt
-          dbt deps
-
-      - name: List reorg models
-        id: list_models
-        run: |
-          reorg_model_list=$(dbt list --select "_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
-          echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT
-
-      - name: Execute block_reorg macro
-        run: |
-          dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '12'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log
\ No newline at end of file
diff --git a/.github/workflows/dbt_run_scheduled_abis.yml b/.github/workflows/dbt_run_scheduled_abis.yml
deleted file mode 100644
index 61a044c..0000000
--- a/.github/workflows/dbt_run_scheduled_abis.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-name: dbt_run_scheduled_abis
-run-name: dbt_run_scheduled_abis
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_test
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-          cache: "pip"
-
-      - name: install dependencies
-        run: |
-          pip install -r requirements.txt
-          dbt deps
-
-      - name: Run ABI Models
-        run: |
-          dbt run -m "fsc_evm,tag:silver_abis" "fsc_evm,tag:gold_abis"
\ No newline at end of file
diff --git a/.github/workflows/dbt_run_scheduled_curated.yml b/.github/workflows/dbt_run_scheduled_curated.yml
deleted file mode 100644
index d430226..0000000
--- a/.github/workflows/dbt_run_scheduled_curated.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-name: dbt_run_scheduled_curated
-run-name: dbt_run_scheduled_curated
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_prod
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-          cache: "pip"
-
-      - name: install dependencies
-        run: |
-          pip install -r requirements.txt
-          dbt deps
-
-      - name: Run Curated Models
-        run: |
-          dbt run -m "_models,tag:curated" "fsc_evm,tag:curated"
\ No newline at end of file
diff --git a/.github/workflows/dbt_run_scheduled_main.yml b/.github/workflows/dbt_run_scheduled_main.yml
deleted file mode 100644
index e96788e..0000000
--- a/.github/workflows/dbt_run_scheduled_main.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-name: dbt_run_scheduled_main
-run-name: dbt_run_scheduled_main
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_prod
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-          cache: "pip"
-
-      - name: install dependencies
-        run: |
-          pip install -r requirements.txt
-          dbt deps
-
-      - name: Run Main Models
-        run: |
-          dbt run -m "fsc_evm,tag:silver_core" "fsc_evm,tag:gold_core" "fsc_evm,tag:silver_prices" "fsc_evm,tag:gold_prices" "fsc_evm,tag:silver_labels" "fsc_evm,tag:gold_labels"
-
-      - name: Run Streamline Models
-        run: |
-          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_decoded_logs_realtime" "fsc_evm,tag:streamline_decoded_logs_complete"
\ No newline at end of file
diff --git a/.github/workflows/dbt_run_streamline_chainhead.yml b/.github/workflows/dbt_run_streamline_chainhead.yml
index ed7781e..6f058e7 100644
--- a/.github/workflows/dbt_run_streamline_chainhead.yml
+++ b/.github/workflows/dbt_run_streamline_chainhead.yml
@@ -42,8 +42,8 @@ jobs:
 
       - name: Run Chainhead Models
        run: |
-          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" "fsc_evm,tag:streamline_core_complete_receipts" "fsc_evm,tag:streamline_core_realtime_receipts"
+          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "monad_models,tag:streamline_testnet_complete" "monad_models,tag:streamline_testnet_realtime"
 
       - name: Run Chainhead Tests
        run: |
-          dbt test -m "fsc_evm,tag:chainhead"
\ No newline at end of file
+          dbt test -m "monad_models,tag:chainhead"
\ No newline at end of file
diff --git a/.github/workflows/dbt_run_streamline_decoder.yml b/.github/workflows/dbt_run_streamline_decoder.yml
deleted file mode 100644
index ee57d05..0000000
--- a/.github/workflows/dbt_run_streamline_decoder.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-name: dbt_run_streamline_decoder
-run-name: dbt_run_streamline_decoder
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_prod
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-          cache: "pip"
-
-      - name: install dependencies
-        run: |
-          pip install -r requirements.txt
-          dbt deps
-      - name: Run DBT Jobs
-        run: |
-          dbt run -m "fsc_evm,tag:decoded_logs"
\ No newline at end of file
diff --git a/.github/workflows/dbt_run_streamline_decoder_history.yml b/.github/workflows/dbt_run_streamline_decoder_history.yml
deleted file mode 100644
index 1163979..0000000
--- a/.github/workflows/dbt_run_streamline_decoder_history.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-name: dbt_run_streamline_decoder_history
-run-name: dbt_run_streamline_decoder_history
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_prod
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-          cache: "pip"
-
-      - name: install dependencies
-        run: |
-          pip install -r requirements.txt
-          dbt deps
-      - name: Run DBT Jobs
-        run: |
-          dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "fsc_evm,tag:streamline_decoded_logs_complete" "fsc_evm,tag:streamline_decoded_logs_history"
\ No newline at end of file
diff --git a/.github/workflows/dbt_run_streamline_history.yml b/.github/workflows/dbt_run_streamline_history.yml
index bf57ca8..64e033a 100644
--- a/.github/workflows/dbt_run_streamline_history.yml
+++ b/.github/workflows/dbt_run_streamline_history.yml
@@ -42,4 +42,4 @@ jobs:
 
       - name: Run History Models
        run: |
-          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_history" "fsc_evm,tag:streamline_core_complete_receipts" "fsc_evm,tag:streamline_core_history_receipts"
+          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "monad_models,tag:streamline_testnet_complete" "monad_models,tag:streamline_testnet_history"
\ No newline at end of file
diff --git a/.github/workflows/dbt_test_tasks.yml b/.github/workflows/dbt_test_tasks.yml
deleted file mode 100644
index 8adb7f5..0000000
--- a/.github/workflows/dbt_test_tasks.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-name: dbt_test_tasks
-run-name: dbt_test_tasks
-
-on:
-  workflow_dispatch:
-    branches:
-      - "main"
-
-env:
-  DBT_PROFILES_DIR: ./
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  called_workflow_template:
-    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_test_tasks.yml@AN-4374/upgrade-dbt-1.7
-    secrets: inherit
\ No newline at end of file
diff --git a/README.md b/README.md
index c53e43d..b1db184 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,12 @@
 ## Repo Set Up
 1. Create a new repository from the [evm-models-template](https://github.com/FlipsideCrypto/evm-models-template)
-2. Update all references to `` to the new chain name, in lowercase by using find and replace
+2. Update all references to `monad` to the new chain name, in lowercase by using find and replace
 3. Update the fsc-evm package version in `packages.yml` to the latest version
 4. Set up the rest of the dbt project, where applicable, including but not limited to:
    - `dbt_project.yml` (enable/disable packages, vars, etc.)
    - `.github/workflows` (update tags, etc.)
    - `github_actions__workflows.csv` (update schedule, workflows, etc.)
-   - `overview.md` (update ``, table references, docs etc.)
+   - `overview.md` (update `monad`, table references, docs etc.)
    - `sources.yml` (update schemas, tables etc.)
    - `requirements.txt` (update dependencies)
    - other files where applicable
diff --git a/data/github_actions__workflows.csv b/data/github_actions__workflows.csv
index 1e46ed5..f19e861 100644
--- a/data/github_actions__workflows.csv
+++ b/data/github_actions__workflows.csv
@@ -1,3 +1,3 @@
 workflow_name,workflow_schedule
-dbt_run_streamline_chainhead,""
-dbt_run_scheduled_main,""
\ No newline at end of file
+dbt_run_streamline_chainhead,"6,36 * * * *"
+dbt_run_dev_refresh,"8 5 * * 1"
\ No newline at end of file
diff --git a/dbt_project.yml b/dbt_project.yml
index c4ba09d..9da1479 100644
--- a/dbt_project.yml
+++ b/dbt_project.yml
@@ -1,17 +1,17 @@
 # Name your project! Project names should contain only lowercase characters
 # and underscores. A good package name should reflect your organization's
 # name or the intended use of these models
-name: "_models" # replace with the name of the chain
+name: "monad_models" # replace with the name of the chain
 version: "1.0.0"
 config-version: 2
 
 # This setting configures which "profile" dbt uses for this project.
-profile: "" # replace with the name of the chain
+profile: "monad" # replace with the name of the chain
 
 # These configurations specify where dbt should look for different types of files.
 # The `source-paths` config, for example, states that models in this project can be
 # found in the "models/" directory. You probably won't need to change these!
-model-paths: ["models"] +model-paths: ["models"] analysis-paths: ["analysis"] test-paths: ["tests"] seed-paths: ["data"] @@ -26,10 +26,8 @@ clean-targets: # directories to be removed by `dbt clean` - "dbt_packages" tests: - _models: # replace with the name of the chain + monad_models: # replace with the name of the chain +store_failures: true # all tests - fsc_evm: - +store_failures: true on-run-start: - "{{ fsc_evm.create_sps() }}" @@ -41,7 +39,7 @@ on-run-end: dispatch: - macro_namespace: dbt search_order: - - -models + - monad-models - dbt_snowflake_query_tags - dbt @@ -53,33 +51,22 @@ query-comment: # Full documentation: https://docs.getdbt.com/docs/configuring-models models: - _models: # replace with the name of the chain + monad_models: # replace with the name of the chain +copy_grants: true +persist_docs: relation: true columns: true +on_schema_change: "append_new_columns" livequery_models: + +enabled: true # Keep livequery models enabled since you need them +materialized: ephemeral fsc_evm: - +enabled: false # disable fsc_evm package by default + +enabled: false # keep fsc_evm package disabled +copy_grants: true +persist_docs: relation: true columns: true +on_schema_change: "append_new_columns" - main_package: - +enabled: false # disable main_package by default, enabled other packages as needed - core: - +enabled: true # enable subpackages, as needed - github_actions: - +enabled: true - labels: - +enabled: true - prices: - +enabled: true - utils: - +enabled: true vars: "dbt_date:time_zone": GMT @@ -103,24 +90,33 @@ vars: config: # The keys correspond to dbt profiles and are case sensitive dev: - API_INTEGRATION: - EXTERNAL_FUNCTION_URI: + API_INTEGRATION: AWS_MONAD_API_STG_V2 + EXTERNAL_FUNCTION_URI: byqploe1p2.execute-api.us-east-1.amazonaws.com/stg/ ROLES: - - AWS_LAMBDA__API # replace with the name of the chain + - AWS_LAMBDA_MONAD_API # replace with the name of the chain - INTERNAL_DEV prod: - API_INTEGRATION: - EXTERNAL_FUNCTION_URI: + API_INTEGRATION: AWS_MONAD_API_PROD_V2 + EXTERNAL_FUNCTION_URI: t651otge99.execute-api.us-east-1.amazonaws.com/prod/ ROLES: - - AWS_LAMBDA__API # replace with the name of the chain + - AWS_LAMBDA_MONAD_API # replace with the name of the chain - INTERNAL_DEV - - DBT_CLOUD_ # replace with the name of the chain + - DBT_CLOUD_MONAD # replace with the name of the chain #### STREAMLINE 2.0 END #### #### FSC_EVM BEGIN #### + ### GLOBAL VARIABLES BEGIN ### + + ## REQUIRED + GLOBAL_PROD_DB_NAME: "monad" + GLOBAL_NODE_SECRET_PATH: "Vault/prod/monad/quicknode/testnet" + GLOBAL_BLOCKS_PER_HOUR: 3600 + + ### GLOBAL VARIABLES END ### + # Please visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables #### FSC_EVM END #### \ No newline at end of file diff --git a/models/__overview__.md b/models/__overview__.md index 430f616..89a59b4 100644 --- a/models/__overview__.md +++ b/models/__overview__.md @@ -3,7 +3,7 @@ # Welcome to the Flipside Crypto Core Models Documentation! 
 
 ## **What does this documentation cover?**
-The documentation included here details the design of the Core tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/-models)
+The documentation included here details the design of the Core tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/monad-models)
 
 ## **How do I use these docs?**
 The easiest way to navigate this documentation is to use the Quick Links below. These links will take you to the documentation for each table, which contains a description, a list of the columns, and other helpful information.
@@ -16,22 +16,22 @@
 There is more information on how to use dbt docs in the last section of this doc
 
 **Click on the links below to jump to the documentation for each schema.**
 
-### Core Tables (.core)
+### Core Tables (monad.core)
 
 **Dimension Tables:**
-- [dim_labels](https://flipsidecrypto.github.io/-models/#!/model/model.fsc_evm.core__dim_labels)
+- [dim_labels](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.core__dim_labels)
 
 **Fact Tables:**
-- [fact_blocks](https://flipsidecrypto.github.io/-models/#!/model/model.fsc_evm.core__fact_blocks)
-- [fact_event_logs](https://flipsidecrypto.github.io/-models/#!/model/model.fsc_evm.core__fact_event_logs)
-- [fact_transactions](https://flipsidecrypto.github.io/-models/#!/model/model.fsc_evm.core__fact_transactions)
-- [fact_traces](https://flipsidecrypto.github.io/-models/#!/model/model.fsc_evm.core__fact_traces)
+- [fact_blocks](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.core__fact_blocks)
+- [fact_event_logs](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.core__fact_event_logs)
+- [fact_transactions](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.core__fact_transactions)
+- [fact_traces](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.core__fact_traces)
 
-### Price Tables (.price)
-- [dim_asset_metadata](https://flipsidecrypto.github.io/-models/#!/model/model.fsc_evm.price__dim_asset_metadata)
-- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/-models/#!/model/model.fsc_evm.price__fact_prices_ohlc_hourly)
-- [ez_asset_metadata](https://flipsidecrypto.github.io/-models/#!/model/model.fsc_evm.price__ez_asset_metadata)
-- [ez_prices_hourly](https://flipsidecrypto.github.io/-models/#!/model/model.fsc_evm.price__ez_prices_hourly)
+### Price Tables (monad.price)
+- [dim_asset_metadata](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.price__dim_asset_metadata)
+- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.price__fact_prices_ohlc_hourly)
+- [ez_asset_metadata](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.price__ez_asset_metadata)
+- [ez_prices_hourly](https://flipsidecrypto.github.io/monad-models/#!/model/model.fsc_evm.price__ez_prices_hourly)
 
 ## **Helpful User-Defined Functions (UDFs)**
@@ -75,7 +75,7 @@ Note that you can also right-click on models to interactively filter and explore
 - [Flipside](https://flipsidecrypto.xyz/)
 - [Data Studio](https://flipsidecrypto.xyz/studio)
 - [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
-- [Github](https://github.com/FlipsideCrypto/-models)
+- [Github](https://github.com/FlipsideCrypto/monad-models)
 - [What is dbt?](https://docs.getdbt.com/docs/introduction)
 
 {% enddocs %}
\ No newline at end of file
diff --git a/models/sources.yml b/models/sources.yml
index d8d168e..8d38ff0 100644
--- a/models/sources.yml
+++ b/models/sources.yml
@@ -11,13 +11,12 @@ sources:
     schema: >-
       {{ var('GLOBAL_PROD_DB_NAME') ~ ('_dev' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else '') }}
     tables:
-      - name: blocks
-      - name: transactions
-      - name: >-
-          {{ 'receipts_by_hash' if var("GLOBAL_USES_RECEIPTS_BY_HASH", False) else 'receipts' }}
-      - name: traces
-      - name: confirm_blocks
-      - name: decoded_logs
+      - name: testnet_blocks
+      - name: testnet_transactions
+      - name: testnet_receipts
+      - name: testnet_traces
+      - name: testnet_confirm_blocks
+      - name: testnet_decoded_logs
   - name: crosschain_silver
     database: "{{ 'crosschain' if target.database.upper() == var('GLOBAL_PROD_DB_NAME').upper() else 'crosschain_dev' }}"
     schema: silver
diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__blocks.sql b/models/testnet/core/bronze/streamline/bronze_testnet__blocks.sql
new file mode 100644
index 0000000..ac61e42
--- /dev/null
+++ b/models/testnet/core/bronze/streamline/bronze_testnet__blocks.sql
@@ -0,0 +1,41 @@
+{{ config (
+    materialized = 'view',
+    tags = ['bronze_core']
+) }}
+
+WITH meta AS (
+    SELECT
+        job_created_time AS _inserted_timestamp,
+        file_name,
+        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
+    FROM
+        TABLE(
+            information_schema.external_table_file_registration_history(
+                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
+                table_name => '{{ source( "bronze_streamline", "testnet_blocks") }}')
+        ) A
+)
+SELECT
+    s.*,
+    b.file_name,
+    b._inserted_timestamp,
+    COALESCE(
+        s.value :"BLOCK_NUMBER" :: STRING,
+        s.metadata :request :"data" :id :: STRING,
+        PARSE_JSON(
+            s.metadata :request :"data"
+        ) :id :: STRING
+    ) :: INT AS block_number
+FROM
+    {{ source(
+        "bronze_streamline",
+        "testnet_blocks"
+    ) }}
+    s
+    JOIN meta b
+    ON b.file_name = metadata$filename
+    AND b.partition_key = s.partition_key
+WHERE
+    b.partition_key = s.partition_key
+    AND DATA :error IS NULL
+    AND DATA IS NOT NULL
\ No newline at end of file
diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__blocks_fr.sql b/models/testnet/core/bronze/streamline/bronze_testnet__blocks_fr.sql
new file mode 100644
index 0000000..48faf7a
--- /dev/null
+++ b/models/testnet/core/bronze/streamline/bronze_testnet__blocks_fr.sql
@@ -0,0 +1,42 @@
+{{ config (
+    materialized = 'view',
+    tags = ['bronze_core']
+) }}
+
+WITH meta AS (
+    SELECT
+        registered_on AS _inserted_timestamp,
+        file_name,
+        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
+    FROM
+        TABLE(
+            information_schema.external_table_files(
+                table_name => '{{ source( "bronze_streamline", "testnet_blocks") }}'
+            )
+        ) A
+)
+SELECT
+    s.*,
+    b.file_name,
+    b._inserted_timestamp,
+    COALESCE(
+        s.value :"BLOCK_NUMBER" :: STRING,
+        s.value :"block_number" :: STRING,
+        s.metadata :request :"data" :id :: STRING,
+        PARSE_JSON(
+            s.metadata :request :"data"
+        ) :id :: STRING
+    ) :: INT AS block_number
+FROM
+    {{ source(
+        "bronze_streamline",
+        "testnet_blocks"
+    ) }}
+    s
+    JOIN meta b
+    ON b.file_name = metadata$filename
+    AND b.partition_key = s.partition_key
+WHERE
+    b.partition_key = s.partition_key
+    AND DATA :error IS NULL
+    AND DATA IS NOT NULL
\ No newline at end of file
diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks.sql b/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks.sql
new file mode 100644
index 0000000..f691685
--- /dev/null
+++ b/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks.sql
@@ -0,0 +1,41 @@
+{{ config (
+    materialized = 'view',
+    tags = ['bronze_core']
+) }}
+
+WITH meta AS (
+    SELECT
+        job_created_time AS _inserted_timestamp,
+        file_name,
+        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
+    FROM
+        TABLE(
+            information_schema.external_table_file_registration_history(
+                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
+                table_name => '{{ source( "bronze_streamline", "testnet_confirm_blocks") }}')
+        ) A
+)
+SELECT
+    s.*,
+    b.file_name,
+    b._inserted_timestamp,
+    COALESCE(
+        s.value :"BLOCK_NUMBER" :: STRING,
+        s.metadata :request :"data" :id :: STRING,
+        PARSE_JSON(
+            s.metadata :request :"data"
+        ) :id :: STRING
+    ) :: INT AS block_number
+FROM
+    {{ source(
+        "bronze_streamline",
+        "testnet_confirm_blocks"
+    ) }}
+    s
+    JOIN meta b
+    ON b.file_name = metadata$filename
+    AND b.partition_key = s.partition_key
+WHERE
+    b.partition_key = s.partition_key
+    AND DATA :error IS NULL
+    AND DATA IS NOT NULL
\ No newline at end of file
diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks_fr.sql b/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks_fr.sql
new file mode 100644
index 0000000..6e45963
--- /dev/null
+++ b/models/testnet/core/bronze/streamline/bronze_testnet__confirm_blocks_fr.sql
@@ -0,0 +1,42 @@
+{{ config (
+    materialized = 'view',
+    tags = ['bronze_core']
+) }}
+
+WITH meta AS (
+    SELECT
+        registered_on AS _inserted_timestamp,
+        file_name,
+        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
+    FROM
+        TABLE(
+            information_schema.external_table_files(
+                table_name => '{{ source( "bronze_streamline", "testnet_confirm_blocks") }}'
+            )
+        ) A
+)
+SELECT
+    s.*,
+    b.file_name,
+    b._inserted_timestamp,
+    COALESCE(
+        s.value :"BLOCK_NUMBER" :: STRING,
+        s.value :"block_number" :: STRING,
+        s.metadata :request :"data" :id :: STRING,
+        PARSE_JSON(
+            s.metadata :request :"data"
+        ) :id :: STRING
+    ) :: INT AS block_number
+FROM
+    {{ source(
+        "bronze_streamline",
+        "testnet_confirm_blocks"
+    ) }}
+    s
+    JOIN meta b
+    ON b.file_name = metadata$filename
+    AND b.partition_key = s.partition_key
+WHERE
+    b.partition_key = s.partition_key
+    AND DATA :error IS NULL
+    AND DATA IS NOT NULL
\ No newline at end of file
diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__receipts.sql b/models/testnet/core/bronze/streamline/bronze_testnet__receipts.sql
new file mode 100644
index 0000000..4ad9876
--- /dev/null
+++ b/models/testnet/core/bronze/streamline/bronze_testnet__receipts.sql
@@ -0,0 +1,41 @@
+{{ config (
+    materialized = 'view',
+    tags = ['bronze_core']
+) }}
+
+WITH meta AS (
+    SELECT
+        job_created_time AS _inserted_timestamp,
+        file_name,
+        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
+    FROM
+        TABLE(
+            information_schema.external_table_file_registration_history(
+                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
+                table_name => '{{ source( "bronze_streamline", "testnet_receipts") }}')
+        ) A
+)
+SELECT
+    s.*,
+    b.file_name,
+    b._inserted_timestamp,
+    COALESCE(
+        s.value :"BLOCK_NUMBER" :: STRING,
+        s.metadata :request :"data" :id :: STRING,
+        PARSE_JSON(
+            s.metadata :request :"data"
+        ) :id :: STRING
+    ) :: INT AS block_number
+FROM
+    {{ source(
+        "bronze_streamline",
+        "testnet_receipts"
+    ) }}
+    s
+    JOIN meta b
+    ON b.file_name = metadata$filename
+    AND b.partition_key = s.partition_key
+WHERE
+    b.partition_key = s.partition_key
+    AND DATA :error IS NULL
+    AND DATA IS NOT NULL
\ No newline at end of file
diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__receipts_fr.sql b/models/testnet/core/bronze/streamline/bronze_testnet__receipts_fr.sql
new file mode 100644
index 0000000..43efefd
--- /dev/null
+++ b/models/testnet/core/bronze/streamline/bronze_testnet__receipts_fr.sql
@@ -0,0 +1,42 @@
+{{ config (
+    materialized = 'view',
+    tags = ['bronze_core']
+) }}
+
+WITH meta AS (
+    SELECT
+        registered_on AS _inserted_timestamp,
+        file_name,
+        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
+    FROM
+        TABLE(
+            information_schema.external_table_files(
+                table_name => '{{ source( "bronze_streamline", "testnet_receipts") }}'
+            )
+        ) A
+)
+SELECT
+    s.*,
+    b.file_name,
+    b._inserted_timestamp,
+    COALESCE(
+        s.value :"BLOCK_NUMBER" :: STRING,
+        s.value :"block_number" :: STRING,
+        s.metadata :request :"data" :id :: STRING,
+        PARSE_JSON(
+            s.metadata :request :"data"
+        ) :id :: STRING
+    ) :: INT AS block_number
+FROM
+    {{ source(
+        "bronze_streamline",
+        "testnet_receipts"
+    ) }}
+    s
+    JOIN meta b
+    ON b.file_name = metadata$filename
+    AND b.partition_key = s.partition_key
+WHERE
+    b.partition_key = s.partition_key
+    AND DATA :error IS NULL
+    AND DATA IS NOT NULL
\ No newline at end of file
diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__traces.sql b/models/testnet/core/bronze/streamline/bronze_testnet__traces.sql
new file mode 100644
index 0000000..921a718
--- /dev/null
+++ b/models/testnet/core/bronze/streamline/bronze_testnet__traces.sql
@@ -0,0 +1,41 @@
+{{ config (
+    materialized = 'view',
+    tags = ['bronze_core']
+) }}
+
+WITH meta AS (
+    SELECT
+        job_created_time AS _inserted_timestamp,
+        file_name,
+        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
+    FROM
+        TABLE(
+            information_schema.external_table_file_registration_history(
+                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
+                table_name => '{{ source( "bronze_streamline", "testnet_traces") }}')
+        ) A
+)
+SELECT
+    s.*,
+    b.file_name,
+    b._inserted_timestamp,
+    COALESCE(
+        s.value :"BLOCK_NUMBER" :: STRING,
+        s.metadata :request :"data" :id :: STRING,
+        PARSE_JSON(
+            s.metadata :request :"data"
+        ) :id :: STRING
+    ) :: INT AS block_number
+FROM
+    {{ source(
+        "bronze_streamline",
+        "testnet_traces"
+    ) }}
+    s
+    JOIN meta b
+    ON b.file_name = metadata$filename
+    AND b.partition_key = s.partition_key
+WHERE
+    b.partition_key = s.partition_key
+    AND DATA :error IS NULL
+    AND DATA IS NOT NULL
\ No newline at end of file
diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__traces_fr.sql b/models/testnet/core/bronze/streamline/bronze_testnet__traces_fr.sql
new file mode 100644
index 0000000..c9de29c
--- /dev/null
+++ b/models/testnet/core/bronze/streamline/bronze_testnet__traces_fr.sql
@@ -0,0 +1,42 @@
+{{ config (
+    materialized = 'view',
+    tags = ['bronze_core']
+) }}
+
+WITH meta AS (
+    SELECT
+        registered_on AS _inserted_timestamp,
+        file_name,
+        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
+    FROM
+        TABLE(
+            information_schema.external_table_files(
+                table_name => '{{ source( "bronze_streamline", "testnet_traces") }}'
+            )
+        ) A
+)
+SELECT
+    s.*,
+    b.file_name,
+    b._inserted_timestamp,
+    COALESCE(
+        s.value :"BLOCK_NUMBER" :: STRING,
+        s.value :"block_number" :: STRING,
+        s.metadata :request :"data" :id :: STRING,
+        PARSE_JSON(
+            s.metadata :request :"data"
+        ) :id :: STRING
+    ) :: INT AS block_number
+FROM
+    {{ source(
+        "bronze_streamline",
+        "testnet_traces"
+    ) }}
+    s
+    JOIN meta b
+    ON b.file_name = metadata$filename
+    AND b.partition_key = s.partition_key
+WHERE
+    b.partition_key = s.partition_key
+    AND DATA :error IS NULL
+    AND DATA IS NOT NULL
\ No newline at end of file
diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__transactions.sql b/models/testnet/core/bronze/streamline/bronze_testnet__transactions.sql
new file mode 100644
index 0000000..1ae7b59
--- /dev/null
+++ b/models/testnet/core/bronze/streamline/bronze_testnet__transactions.sql
@@ -0,0 +1,41 @@
+{{ config (
+    materialized = 'view',
+    tags = ['bronze_core']
+) }}
+
+WITH meta AS (
+    SELECT
+        job_created_time AS _inserted_timestamp,
+        file_name,
+        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
+    FROM
+        TABLE(
+            information_schema.external_table_file_registration_history(
+                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
+                table_name => '{{ source( "bronze_streamline", "testnet_transactions") }}')
+        ) A
+)
+SELECT
+    s.*,
+    b.file_name,
+    b._inserted_timestamp,
+    COALESCE(
+        s.value :"BLOCK_NUMBER" :: STRING,
+        s.metadata :request :"data" :id :: STRING,
+        PARSE_JSON(
+            s.metadata :request :"data"
+        ) :id :: STRING
+    ) :: INT AS block_number
+FROM
+    {{ source(
+        "bronze_streamline",
+        "testnet_transactions"
+    ) }}
+    s
+    JOIN meta b
+    ON b.file_name = metadata$filename
+    AND b.partition_key = s.partition_key
+WHERE
+    b.partition_key = s.partition_key
+    AND DATA :error IS NULL
+    AND DATA IS NOT NULL
\ No newline at end of file
diff --git a/models/testnet/core/bronze/streamline/bronze_testnet__transactions_fr.sql b/models/testnet/core/bronze/streamline/bronze_testnet__transactions_fr.sql
new file mode 100644
index 0000000..8cfa2f2
--- /dev/null
+++ b/models/testnet/core/bronze/streamline/bronze_testnet__transactions_fr.sql
@@ -0,0 +1,42 @@
+{{ config (
+    materialized = 'view',
+    tags = ['bronze_core']
+) }}
+
+WITH meta AS (
+    SELECT
+        registered_on AS _inserted_timestamp,
+        file_name,
+        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS partition_key
+    FROM
+        TABLE(
+            information_schema.external_table_files(
+                table_name => '{{ source( "bronze_streamline", "testnet_transactions") }}'
+            )
+        ) A
+)
+SELECT
+    s.*,
+    b.file_name,
+    b._inserted_timestamp,
+    COALESCE(
+        s.value :"BLOCK_NUMBER" :: STRING,
+        s.value :"block_number" :: STRING,
+        s.metadata :request :"data" :id :: STRING,
+        PARSE_JSON(
+            s.metadata :request :"data"
+        ) :id :: STRING
+    ) :: INT AS block_number
+FROM
+    {{ source(
+        "bronze_streamline",
+        "testnet_transactions"
+    ) }}
+    s
+    JOIN meta b
+    ON b.file_name = metadata$filename
+    AND b.partition_key = s.partition_key
+WHERE
+    b.partition_key = s.partition_key
+    AND DATA :error IS NULL
+    AND DATA IS NOT NULL
\ No newline at end of file
diff --git a/models/testnet/core/streamline/complete/streamline__testnet_blocks_complete.sql b/models/testnet/core/streamline/complete/streamline__testnet_blocks_complete.sql
new file mode 100644
index 0000000..35c1fe4
--- /dev/null
+++ b/models/testnet/core/streamline/complete/streamline__testnet_blocks_complete.sql
@@ -0,0 +1,33 @@
+-- depends_on: {{ ref('bronze_testnet__blocks') }}
+
+{{ config (
+    materialized = "incremental",
+    unique_key = "block_number",
+    cluster_by = "ROUND(block_number, -3)",
+    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
+    tags = ['streamline_testnet_complete']
+) }}
+
+SELECT
+    block_number,
+    file_name,
+    {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_blocks_id,
+    SYSDATE() AS inserted_timestamp,
+    SYSDATE() AS modified_timestamp,
+    _inserted_timestamp,
+    '{{ invocation_id }}' AS _invocation_id
+FROM
+    {% if is_incremental() %}
+    {{ ref('bronze_testnet__blocks') }}
+    WHERE
+        _inserted_timestamp >= (
+            SELECT
+                COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
+            FROM
+                {{ this }}
+        )
+    {% else %}
+    {{ ref('bronze_testnet__blocks_fr') }}
+    {% endif %}
+
+QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1
\ No newline at end of file
diff --git a/models/testnet/core/streamline/complete/streamline__testnet_receipts_complete.sql b/models/testnet/core/streamline/complete/streamline__testnet_receipts_complete.sql
new file mode 100644
index 0000000..1e12d80
--- /dev/null
+++ b/models/testnet/core/streamline/complete/streamline__testnet_receipts_complete.sql
@@ -0,0 +1,33 @@
+-- depends_on: {{ ref('bronze_testnet__receipts') }}
+
+{{ config (
+    materialized = "incremental",
+    unique_key = "block_number",
+    cluster_by = "ROUND(block_number, -3)",
+    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
+    tags = ['streamline_testnet_complete']
+) }}
+
+SELECT
+    block_number,
+    file_name,
+    {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_receipts_id,
+    SYSDATE() AS inserted_timestamp,
+    SYSDATE() AS modified_timestamp,
+    _inserted_timestamp,
+    '{{ invocation_id }}' AS _invocation_id
+FROM
+    {% if is_incremental() %}
+    {{ ref('bronze_testnet__receipts') }}
+    WHERE
+        _inserted_timestamp >= (
+            SELECT
+                COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
+            FROM
+                {{ this }}
+        )
+    {% else %}
+    {{ ref('bronze_testnet__receipts_fr') }}
+    {% endif %}
+
+QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1
\ No newline at end of file
diff --git a/models/testnet/core/streamline/complete/streamline__testnet_traces_complete.sql b/models/testnet/core/streamline/complete/streamline__testnet_traces_complete.sql
new file mode 100644
index 0000000..020961f
--- /dev/null
+++ b/models/testnet/core/streamline/complete/streamline__testnet_traces_complete.sql
@@ -0,0 +1,33 @@
+-- depends_on: {{ ref('bronze_testnet__traces') }}
+
+{{ config (
+    materialized = "incremental",
+    unique_key = "block_number",
+    cluster_by = "ROUND(block_number, -3)",
+    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
+    tags = ['streamline_testnet_complete']
+) }}
+
+SELECT
+    block_number,
+    file_name,
+    {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_traces_id,
+    SYSDATE() AS inserted_timestamp,
+    SYSDATE() AS modified_timestamp,
+    _inserted_timestamp,
+    '{{ invocation_id }}' AS _invocation_id
+FROM
+    {% if is_incremental() %}
+    {{ ref('bronze_testnet__traces') }}
+    WHERE
+        _inserted_timestamp >= (
+            SELECT
+                COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
+            FROM
+                {{ this }}
+        )
+    {% else %}
+    {{ ref('bronze_testnet__traces_fr') }}
+    {% endif %}
+
+QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1
\ No newline at end of file
diff --git a/models/testnet/core/streamline/complete/streamline__testnet_transactions_complete.sql b/models/testnet/core/streamline/complete/streamline__testnet_transactions_complete.sql
new file mode 100644
index 0000000..265502a
--- /dev/null
+++ b/models/testnet/core/streamline/complete/streamline__testnet_transactions_complete.sql
@@ -0,0 +1,33 @@
+-- depends_on: {{ ref('bronze_testnet__transactions') }}
+
+{{ config (
+    materialized = "incremental",
+    unique_key = "block_number",
+    cluster_by = "ROUND(block_number, -3)",
+    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
+    tags = ['streamline_testnet_complete']
+) }}
+
+SELECT
+    block_number,
+    file_name,
+    {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_testnet_transactions_id,
+    SYSDATE() AS inserted_timestamp,
+    SYSDATE() AS modified_timestamp,
+    _inserted_timestamp,
+    '{{ invocation_id }}' AS _invocation_id
+FROM
+    {% if is_incremental() %}
+    {{ ref('bronze_testnet__transactions') }}
+    WHERE
+        _inserted_timestamp >= (
+            SELECT
+                COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
+            FROM
+                {{ this }}
+        )
+    {% else %}
+    {{ ref('bronze_testnet__transactions_fr') }}
+    {% endif %}
+
+QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1
\ No newline at end of file
diff --git a/models/testnet/core/streamline/history/streamline__testnet_blocks_transactions_history.sql b/models/testnet/core/streamline/history/streamline__testnet_blocks_transactions_history.sql
new file mode 100644
index 0000000..9205666
--- /dev/null
+++ b/models/testnet/core/streamline/history/streamline__testnet_blocks_transactions_history.sql
@@ -0,0 +1,54 @@
+{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
+
+{{ config (
+    materialized = "view",
+    post_hook = fsc_utils.if_data_call_function_v2(
+        func = 'streamline.udf_bulk_rest_api_v2',
+        target = "{{this.schema}}.{{this.identifier}}",
+        params ={ "external_table" :"testnet_blocks_transactions",
+        "sql_limit" :"2000000",
+        "producer_batch_size" :"1800",
+        "worker_batch_size" :"1800",
+        "sql_source" :"{{this.identifier}}",
+        "exploded_key": tojson(["result", "result.transactions"]) }
+    ),
+    tags = ['streamline_testnet_history']
+) }}
+
+WITH to_do AS (
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_blocks") }}
+    EXCEPT
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_blocks_complete") }} b
+    INNER JOIN {{ ref("streamline__testnet_transactions_complete") }} t USING(block_number)
+),
+ready_blocks AS (
+    SELECT block_number
+    FROM to_do
+)
+SELECT
+    block_number,
+    ROUND(block_number, -3) AS partition_key,
+    live.udf_api(
+        'POST',
+        '{Service}/{Authentication}',
+        OBJECT_CONSTRUCT(
+            'Content-Type', 'application/json',
+            'fsc-quantum-state', 'streamline'
+        ),
+        OBJECT_CONSTRUCT(
+            'id', block_number,
+            'jsonrpc', '2.0',
+            'method', 'eth_getBlockByNumber',
+            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)
+        ),
+        '{{ node_secret_path }}'
+    ) AS request
+FROM
+    ready_blocks
+
+ORDER BY block_number desc
+
+LIMIT
+    2000000
\ No newline at end of file
diff --git a/models/testnet/core/streamline/history/streamline__testnet_receipts_history.sql b/models/testnet/core/streamline/history/streamline__testnet_receipts_history.sql
new file mode 100644
index 0000000..7853dd2
--- /dev/null
+++ b/models/testnet/core/streamline/history/streamline__testnet_receipts_history.sql
@@ -0,0 +1,52 @@
+{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
+
+{{ config (
+    materialized = "view",
+    post_hook = fsc_utils.if_data_call_function_v2(
+        func = 'streamline.udf_bulk_rest_api_v2',
+        target = "{{this.schema}}.{{this.identifier}}",
+        params ={ "external_table" :"testnet_receipts",
+        "sql_limit" :"2000000",
+        "producer_batch_size" :"1800",
+        "worker_batch_size" :"1800",
+        "sql_source" :"{{this.identifier}}" }
+    ),
+    tags = ['streamline_testnet_history']
+) }}
+
+WITH to_do AS (
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_blocks") }}
+    EXCEPT
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_receipts_complete") }}
+),
+ready_blocks AS (
+    SELECT block_number
+    FROM to_do
+)
+SELECT
+    block_number,
+    ROUND(block_number, -3) AS partition_key,
+    live.udf_api(
+        'POST',
+        '{Service}/{Authentication}',
+        OBJECT_CONSTRUCT(
+            'Content-Type', 'application/json',
+            'fsc-quantum-state', 'streamline'
+        ),
+        OBJECT_CONSTRUCT(
+            'id', block_number,
+            'jsonrpc', '2.0',
+            'method', 'eth_getBlockReceipts',
+            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))
+        ),
+        '{{ node_secret_path }}'
+    ) AS request
+FROM
+    ready_blocks
+
+ORDER BY block_number desc
+
+LIMIT
+    2000000
\ No newline at end of file
diff --git a/models/testnet/core/streamline/history/streamline__testnet_traces_history.sql b/models/testnet/core/streamline/history/streamline__testnet_traces_history.sql
new file mode 100644
index 0000000..3b61055
--- /dev/null
+++ b/models/testnet/core/streamline/history/streamline__testnet_traces_history.sql
@@ -0,0 +1,53 @@
+{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
+
+{{ config (
+    materialized = "view",
+    post_hook = fsc_utils.if_data_call_function_v2(
+        func = 'streamline.udf_bulk_rest_api_v2',
+        target = "{{this.schema}}.{{this.identifier}}",
+        params ={ "external_table" :"testnet_traces",
+        "sql_limit" :"2000000",
+        "producer_batch_size" :"1800",
+        "worker_batch_size" :"1800",
+        "sql_source" :"{{this.identifier}}",
+        "exploded_key": tojson(["result"]) }
+    ),
+    tags = ['streamline_testnet_history']
+) }}
+
+WITH to_do AS (
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_blocks") }}
+    EXCEPT
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_traces_complete") }}
+),
+ready_blocks AS (
+    SELECT block_number
+    FROM to_do
+)
+SELECT
+    block_number,
+    ROUND(block_number, -3) AS partition_key,
+    live.udf_api(
+        'POST',
+        '{Service}/{Authentication}',
+        OBJECT_CONSTRUCT(
+            'Content-Type', 'application/json',
+            'fsc-quantum-state', 'streamline'
+        ),
+        OBJECT_CONSTRUCT(
+            'id', block_number,
+            'jsonrpc', '2.0',
+            'method', 'debug_traceBlockByNumber',
+            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))
+        ),
+        '{{ node_secret_path }}'
+    ) AS request
+FROM
+    ready_blocks
+
+ORDER BY block_number desc
+
+LIMIT
+    2000000
\ No newline at end of file
diff --git a/models/testnet/core/streamline/realtime/streamline__testnet_blocks_transactions_realtime.sql b/models/testnet/core/streamline/realtime/streamline__testnet_blocks_transactions_realtime.sql
new file mode 100644
index 0000000..8d7d984
--- /dev/null
+++ b/models/testnet/core/streamline/realtime/streamline__testnet_blocks_transactions_realtime.sql
@@ -0,0 +1,54 @@
+{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
+
+{{ config (
+    materialized = "view",
+    post_hook = fsc_utils.if_data_call_function_v2(
+        func = 'streamline.udf_bulk_rest_api_v2',
+        target = "{{this.schema}}.{{this.identifier}}",
+        params ={ "external_table" :"testnet_blocks_transactions",
+        "sql_limit" :"7200",
+        "producer_batch_size" :"3600",
+        "worker_batch_size" :"1800",
+        "sql_source" :"{{this.identifier}}",
+        "exploded_key": tojson(["result", "result.transactions"]) }
+    ),
+    tags = ['streamline_testnet_realtime']
+) }}
+
+WITH to_do AS (
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_blocks") }}
+    EXCEPT
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_blocks_complete") }} b
+    INNER JOIN {{ ref("streamline__testnet_transactions_complete") }} t USING(block_number)
+),
+ready_blocks AS (
+    SELECT block_number
+    FROM to_do
+)
+SELECT
+    block_number,
+    ROUND(block_number, -3) AS partition_key,
+    live.udf_api(
+        'POST',
+        '{Service}/{Authentication}',
+        OBJECT_CONSTRUCT(
+            'Content-Type', 'application/json',
+            'fsc-quantum-state', 'streamline'
+        ),
+        OBJECT_CONSTRUCT(
+            'id', block_number,
+            'jsonrpc', '2.0',
+            'method', 'eth_getBlockByNumber',
+            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)
+        ),
+        '{{ node_secret_path }}'
+    ) AS request
+FROM
+    ready_blocks
+
+ORDER BY block_number desc
+
+LIMIT
+    7200
\ No newline at end of file
diff --git a/models/testnet/core/streamline/realtime/streamline__testnet_receipts_realtime.sql b/models/testnet/core/streamline/realtime/streamline__testnet_receipts_realtime.sql
new file mode 100644
index 0000000..fefd652
--- /dev/null
+++ b/models/testnet/core/streamline/realtime/streamline__testnet_receipts_realtime.sql
@@ -0,0 +1,52 @@
+{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
+
+{{ config (
+    materialized = "view",
+    post_hook = fsc_utils.if_data_call_function_v2(
+        func = 'streamline.udf_bulk_rest_api_v2',
+        target = "{{this.schema}}.{{this.identifier}}",
+        params ={ "external_table" :"testnet_receipts",
+        "sql_limit" :"7200",
+        "producer_batch_size" :"1800",
+        "worker_batch_size" :"1800",
+        "sql_source" :"{{this.identifier}}" }
+    ),
+    tags = ['streamline_testnet_realtime']
+) }}
+
+WITH to_do AS (
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_blocks") }}
+    EXCEPT
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_receipts_complete") }}
+),
+ready_blocks AS (
+    SELECT block_number
+    FROM to_do
+)
+SELECT
+    block_number,
+    ROUND(block_number, -3) AS partition_key,
+    live.udf_api(
+        'POST',
+        '{Service}/{Authentication}',
+        OBJECT_CONSTRUCT(
+            'Content-Type', 'application/json',
+            'fsc-quantum-state', 'streamline'
+        ),
+        OBJECT_CONSTRUCT(
+            'id', block_number,
+            'jsonrpc', '2.0',
+            'method', 'eth_getBlockReceipts',
+            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))
+        ),
+        '{{ node_secret_path }}'
+    ) AS request
+FROM
+    ready_blocks
+
+ORDER BY block_number desc
+
+LIMIT
+    7200
\ No newline at end of file
diff --git a/models/testnet/core/streamline/realtime/streamline__testnet_traces_realtime.sql b/models/testnet/core/streamline/realtime/streamline__testnet_traces_realtime.sql
new file mode 100644
index 0000000..8b05706
--- /dev/null
+++ b/models/testnet/core/streamline/realtime/streamline__testnet_traces_realtime.sql
@@ -0,0 +1,53 @@
+{% set node_secret_path = var("GLOBAL_NODE_SECRET_PATH") %}
+
+{{ config (
+    materialized = "view",
+    post_hook = fsc_utils.if_data_call_function_v2(
+        func = 'streamline.udf_bulk_rest_api_v2',
+        target = "{{this.schema}}.{{this.identifier}}",
+        params ={ "external_table" :"testnet_traces",
+        "sql_limit" :"7200",
+        "producer_batch_size" :"1800",
+        "worker_batch_size" :"1800",
+        "sql_source" :"{{this.identifier}}",
+        "exploded_key": tojson(["result"]) }
+    ),
+    tags = ['streamline_testnet_realtime']
+) }}
+
+WITH to_do AS (
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_blocks") }}
+    EXCEPT
+    SELECT block_number
+    FROM {{ ref("streamline__testnet_traces_complete") }}
+),
+ready_blocks AS (
+    SELECT block_number
+    FROM to_do
+)
+SELECT
+    block_number,
+    ROUND(block_number, -3) AS partition_key,
+    live.udf_api(
+        'POST',
+        '{Service}/{Authentication}',
+        OBJECT_CONSTRUCT(
+            'Content-Type', 'application/json',
+            'fsc-quantum-state', 'streamline'
+        ),
+        OBJECT_CONSTRUCT(
+            'id', block_number,
+            'jsonrpc', '2.0',
+            'method', 'debug_traceBlockByNumber',
+            'params', ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))
+        ),
+        '{{ node_secret_path }}'
+    ) AS request
+FROM
+    ready_blocks
+
+ORDER BY block_number desc
+
+LIMIT
+    7200
\ No newline at end of file
diff --git a/models/testnet/core/streamline/streamline__get_testnet_chainhead.sql b/models/testnet/core/streamline/streamline__get_testnet_chainhead.sql
new file mode 100644
index 0000000..7ce2e82
--- /dev/null
+++ b/models/testnet/core/streamline/streamline__get_testnet_chainhead.sql
@@ -0,0 +1,28 @@
+{{ config (
+    materialized = 'table',
+    tags = ['streamline_testnet_complete','chainhead']
+) }}
+
+SELECT
+    live.udf_api(
+        'POST',
+        '{Service}/{Authentication}',
+        OBJECT_CONSTRUCT(
+            'Content-Type', 'application/json',
+            'fsc-quantum-state', 'LiveQuery'
+        ),
+        OBJECT_CONSTRUCT(
+            'id',
+            0,
+            'jsonrpc',
+            '2.0',
+            'method',
+            'eth_blockNumber',
+            'params',
+            []
+        ),
+        '{{ var('GLOBAL_NODE_SECRET_PATH') }}'
+    ) AS resp,
+    utils.udf_hex_to_int(
+        resp :data :result :: STRING
+    ) AS block_number
\ No newline at end of file
diff --git a/models/testnet/core/streamline/streamline__get_testnet_chainhead.yml b/models/testnet/core/streamline/streamline__get_testnet_chainhead.yml
new file mode 100644
index 0000000..c2a8418
--- /dev/null
+++ b/models/testnet/core/streamline/streamline__get_testnet_chainhead.yml
@@ -0,0 +1,9 @@
+version: 2
+models:
+  - name: streamline__get_testnet_chainhead
+    description: "This model is used to get the chainhead from the blockchain."
+
+    columns:
+      - name: BLOCK_NUMBER
+        tests:
+          - not_null
\ No newline at end of file
diff --git a/models/testnet/core/streamline/streamline__testnet_blocks.sql b/models/testnet/core/streamline/streamline__testnet_blocks.sql
new file mode 100644
index 0000000..ec03fc0
--- /dev/null
+++ b/models/testnet/core/streamline/streamline__testnet_blocks.sql
@@ -0,0 +1,24 @@
+{{ config (
+    materialized = "view",
+    tags = ['streamline_testnet_complete']
+) }}
+
+SELECT
+    _id,
+    (
+        ({{ var('GLOBAL_BLOCKS_PER_HOUR',0) }} / 60) * {{ var('GLOBAL_CHAINHEAD_DELAY',3) }}
+    ) :: INT AS block_number_delay, --minute-based block delay
+    (_id - block_number_delay) :: INT AS block_number,
+    utils.udf_int_to_hex(block_number) AS block_number_hex
+FROM
+    {{ ref('utils__number_sequence') }}
+WHERE
+    _id <= (
+        SELECT
+            COALESCE(
+                block_number,
+                0
+            )
+        FROM
+            {{ ref("streamline__get_testnet_chainhead") }}
+    )
\ No newline at end of file
diff --git a/models/testnet/core/utils/utils__number_sequence.sql b/models/testnet/core/utils/utils__number_sequence.sql
new file mode 100644
index 0000000..f98859d
--- /dev/null
+++ b/models/testnet/core/utils/utils__number_sequence.sql
@@ -0,0 +1,19 @@
+{{ config(
+    materialized = 'incremental',
+    cluster_by = 'round(_id,-3)',
+    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(_id)",
+    full_refresh = false,
+    tags = ['utils']
+) }}
+
+SELECT
+    ROW_NUMBER() over (
+        ORDER BY
+            SEQ4()
+    ) - 1 :: INT AS _id
+FROM
+    TABLE(GENERATOR(rowcount => 1000000000))
+WHERE 1=1
+{% if is_incremental() %}
+AND 1=0
+{% endif %}
\ No newline at end of file
diff --git a/packages.yml b/packages.yml
index 52e2031..4d95580 100644
--- a/packages.yml
+++ b/packages.yml
@@ -1,3 +1,3 @@
 packages:
   - git: https://github.com/FlipsideCrypto/fsc-evm.git
-    revision:
+    revision: v3.21.1
\ No newline at end of file
diff --git a/profiles.yml b/profiles.yml
index 229f691..4525481 100644
--- a/profiles.yml
+++ b/profiles.yml
@@ -1,4 +1,4 @@
-:
+monad:
   target: prod
   outputs:
     dev: