reorg/folder-tests (#141)

* initial folder structure reorg

* initial workflow reorg + python version

* observability tags

* abis tags

* streamline tags

* non_realtime tags

* missed tags

* comments

* changed folder name

* removed legacy workflows

* decoder workflows

* requirements

* dbt version
This commit is contained in:
drethereum 2023-08-02 09:32:04 -06:00 committed by GitHub
parent 1ea173e5d8
commit c4cfb01f31
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
304 changed files with 463 additions and 318 deletions

View File

@@ -31,17 +31,18 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
- uses: actions/setup-python@v4
with:
python-version: "3.7.x"
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m models/silver/abis
dbt run -m tag:abis

View File

@@ -52,13 +52,14 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
- uses: actions/setup-python@v4
with:
python-version: "3.7.x"
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |

View File

@@ -1,44 +0,0 @@
name: dbt_run_api_feeder_table
run-name: dbt_run_api_feeder_table
on:
workflow_dispatch:
schedule:
# Runs "at 8:00 UTC" (see https://crontab.guru)
- cron: '0 8 * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
with:
python-version: "3.7.x"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m models/silver/api_udf

View File

@@ -31,13 +31,14 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
- uses: actions/setup-python@v4
with:
python-version: "3.7.x"
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |

View File

@@ -31,17 +31,18 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
- uses: actions/setup-python@v4
with:
python-version: "3.7.x"
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m models/silver/_observability
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m tag:observability

View File

@@ -1,45 +0,0 @@
name: dbt_run_scheduled
run-name: dbt_run_scheduled
on:
workflow_dispatch:
schedule:
# Runs "at minute 35, every hour" (see https://crontab.guru)
- cron: '35 * * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
with:
python-version: "3.7.x"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
dbt run --exclude models/silver/api_udf models/silver/abis models/silver/streamline models/silver/silver__decoded_logs.sql models/silver/core/tests
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m models/silver/streamline/decoder/streamline__decode_logs_realtime.sql models/silver/streamline/decoder/streamline__complete_decode_logs.sql

View File

@@ -1,11 +1,12 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history
name: dbt_run_scheduled_non_realtime
run-name: dbt_run_scheduled_non_realtime
on:
workflow_dispatch:
branches:
- "main"
schedule:
# Runs "at minute 35, every hour" (see https://crontab.guru)
- cron: '35 * * * *'
env:
DBT_PROFILES_DIR: ./
@@ -24,21 +25,22 @@ concurrency:
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v2
- uses: actions/setup-python@v4
with:
python-version: '3.7.x'
cache: 'pip'
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake~=${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/silver/streamline/core/history
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m tag:non_realtime 1+tag:streamline_decoded_logs_realtime

View File

@@ -31,14 +31,15 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
- uses: actions/setup-python@v4
with:
python-version: "3.7.x"
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/silver/streamline/core/realtime
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+tag:streamline_core_realtime

View File

@@ -31,14 +31,15 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
- uses: actions/setup-python@v4
with:
python-version: "3.7.x"
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m models/silver/silver__decoded_logs.sql
dbt run -m tag:decoded_logs

View File

@@ -31,14 +31,15 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
- uses: actions/setup-python@v4
with:
python-version: "3.7.x"
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m models/silver/streamline/decoder/streamline__complete_decode_logs.sql models/silver/streamline/decoder/history
dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m 1+tag:streamline_decoded_logs_history

View File

@@ -0,0 +1,70 @@
name: dbt_run_streamline_history_adhoc
run-name: dbt_run_streamline_history_adhoc
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
type: choice
description: DBT Run Environment
required: true
options:
- dev
- prod
- prod_backfill
default: dev
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: choice
description: 'DBT Run Command'
required: true
options:
- dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+tag:streamline_core_history
- dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m 1+tag:streamline_decoded_logs_history
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}

View File

@@ -31,13 +31,14 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
- uses: actions/setup-python@v4
with:
python-version: "3.7.x"
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |

View File

@@ -31,13 +31,14 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
- uses: actions/setup-python@v4
with:
python-version: "3.7.x"
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false
full_refresh = false,
tags = ['non_realtime']
) }}
WITH api_keys AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address",
full_refresh = false
full_refresh = false,
tags = ['non_realtime']
) }}
WITH base AS (

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'incremental',
unique_key = "state_tx_hash"
unique_key = "state_tx_hash",
tags = ['ethereum','non_realtime']
) }}
SELECT

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'incremental',
unique_key = "l1_submission_tx_hash"
unique_key = "l1_submission_tx_hash",
tags = ['ethereum','non_realtime']
) }}
SELECT

View File

@@ -1,5 +1,6 @@
{{ config(
materialized = "table"
materialized = "table",
tags = ['non_realtime']
) }}
{{ dbt_date.get_date_dimension(
'2017-01-01',

View File

@@ -2,7 +2,6 @@
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
tags = ['velodrome'],
meta ={ 'database_tags':{ 'table':{ 'PROTOCOL': 'VELODROME',
'PURPOSE': 'DEFI, DEX' } } }
) }}

View File

@@ -2,7 +2,6 @@
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
tags = ['velodrome'],
meta={
'database_tags':{
'table': {

View File

@@ -2,7 +2,6 @@
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
tags = ['velodrome'],
meta={
'database_tags':{
'table': {

View File

@@ -2,7 +2,6 @@
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
tags = ['velodrome'],
meta={
'database_tags':{
'table': {

View File

@@ -2,7 +2,6 @@
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
tags = ['velodrome'],
meta={
'database_tags':{
'table': {

View File

@@ -2,7 +2,6 @@
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
tags = ['velodrome'],
meta ={ 'database_tags':{ 'table':{ 'PROTOCOL': 'VELODROME',
'PURPOSE': 'DEFI, DEX, SWAPS' } } }
) }}

View File

@@ -2,7 +2,6 @@
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
tags = ['velodrome'],
meta={
'database_tags':{
'table': {

View File

@@ -2,7 +2,6 @@
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
tags = ['velodrome'],
meta={
'database_tags':{
'table': {

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
full_refresh = false
full_refresh = false,
tags = ['observability']
) }}
WITH summary_stats AS (

View File

@@ -1,5 +1,6 @@
{{ config(
materialized = 'view'
materialized = 'view',
tags = ['observability']
) }}
SELECT

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
full_refresh = false
full_refresh = false,
tags = ['observability']
) }}
WITH summary_stats AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
full_refresh = false
full_refresh = false,
tags = ['observability']
) }}
WITH summary_stats AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
full_refresh = false
full_refresh = false,
tags = ['observability']
) }}
WITH summary_stats AS (

View File

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
full_refresh = false
full_refresh = false,
tags = ['observability']
) }}
WITH summary_stats AS (

View File

@@ -1,7 +1,8 @@
{{ config (
materialized = "incremental",
unique_key = "contract_address",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(contract_address)"
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(contract_address)",
tags = ['abis']
) }}
WITH override_abis AS (

View File

@@ -1,6 +1,7 @@
{{ config (
materialized = "incremental",
unique_key = "abi_id"
unique_key = "abi_id",
tags = ['abis']
) }}
WITH bytecodes AS (

View File

@@ -1,5 +1,6 @@
{{ config (
materialized = 'table'
materialized = 'table',
tags = ['abis']
) }}
WITH abi_base AS (

View File

@@ -1,5 +1,6 @@
{{ config(
materialized = 'view'
materialized = 'view',
tags = ['abis']
) }}
SELECT

View File

@@ -1,5 +1,6 @@
{{ config (
materialized = "table"
materialized = "table",
tags = ['abis']
) }}
WITH base AS (

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'table',
unique_key = "contract_address"
unique_key = "contract_address",
tags = ['abis']
) }}
WITH base AS (

View File

@@ -1,7 +1,8 @@
{{ config (
materialized = "incremental",
unique_key = "id",
merge_update_columns = ["id"]
merge_update_columns = ["id"],
tags = ['abis']
) }}
WITH base AS (

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'incremental',
unique_key = "contract_address"
unique_key = "contract_address",
tags = ['abis']
) }}
WITH base AS (

View File

@@ -3,7 +3,7 @@
materialized = 'incremental',
unique_key = "block_number",
cluster_by = "block_timestamp::date",
tags = ['core'],
tags = ['core','non_realtime'],
full_refresh = false
) }}

View File

@@ -4,7 +4,8 @@
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = "round(block_number,-3)",
full_refresh = false
full_refresh = false,
tags = ['non_realtime']
) }}
WITH base AS (

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'incremental',
unique_key = 'contract_address'
unique_key = 'contract_address',
tags = ['non_realtime']
) }}
WITH base_metadata AS (

View File

@@ -1,6 +1,7 @@
{{ config (
materialized = "incremental",
unique_key = "created_contract_address"
unique_key = "created_contract_address",
tags = ['non_realtime']
) }}
SELECT

View File

@@ -5,7 +5,8 @@
cluster_by = "block_timestamp::date",
incremental_predicates = ["dynamic_range", "block_number"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
full_refresh = false
full_refresh = false,
tags = ['decoded_logs']
) }}
WITH base_data AS (

View File

@@ -4,7 +4,7 @@
unique_key = "block_number",
cluster_by = "block_timestamp::date, _inserted_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
tags = ['core'],
tags = ['core','non_realtime'],
full_refresh = false
) }}

View File

@@ -5,7 +5,7 @@
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(tx_hash)",
tags = ['core'],
tags = ['core','non_realtime'],
full_refresh = false
) }}

View File

@@ -1,6 +1,7 @@
{{ config(
materialized = 'table',
unique_key = "contract_address"
unique_key = "contract_address",
tags = ['non_realtime']
) }}
SELECT

Some files were not shown because too many files have changed in this diff Show More