more workflows

drethereum 2024-07-08 14:16:11 -06:00
parent d5654062c2
commit b63eb86aee
17 changed files with 422 additions and 1 deletion

@@ -0,0 +1,46 @@
name: dbt_alter_gha_task
run-name: dbt_alter_gha_task
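# SUSPEND or RESUME the task associated with the named workflow (delegated to the shared dbt_alter_gha_tasks template)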
on:
workflow_dispatch:
branches:
- "main"
inputs:
workflow_name:
type: string
description: Name of the workflow to perform the action on, no .yml extension
required: true
task_action:
type: choice
description: Action to perform
required: true
options:
- SUSPEND
- RESUME
default: SUSPEND
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@main
with:
workflow_name: |
${{ inputs.workflow_name }}
task_action: |
${{ inputs.task_action }}
environment: workflow_prod
secrets: inherit
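
For reference, a manual run of this workflow could be dispatched from the GitHub CLI roughly as follows; the workflow file name and the input values are illustrative assumptions, not taken verbatim from this diff:

gh workflow run dbt_alter_gha_task.yml -f workflow_name=dbt_run_scheduled_non_realtime -f task_action=SUSPEND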

.github/workflows/dbt_docs_update.yml
@@ -0,0 +1,72 @@
name: docs_update
on:
push:
branches:
- "main"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: refresh ddl for datashare
run: |
cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
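          # the datashare DDL model is only run when dbt ls finds exactly one match for it in this project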
- name: checkout docs branch
run: |
git checkout -B docs origin/main
- name: generate dbt docs
run: dbt docs generate -t prod
- name: move files to docs directory
run: |
mkdir -p ./docs
cp target/{catalog.json,manifest.json,index.html} docs/
- name: clean up target directory
run: dbt clean
- name: check for changes
run: git status
- name: stage changed files
run: git add .
- name: commit changed files
run: |
git config user.email "abc@xyz"
git config user.name "github-actions"
git commit -am "Auto-update docs"
- name: push changes to docs
run: |
git push -f --set-upstream origin docs

@@ -0,0 +1,67 @@
name: dbt_run_deployment
run-name: ${{ inputs.dbt_command }}
on:
workflow_dispatch:
branches:
- "main"
inputs:
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: string
description: 'DBT Run Command'
required: true
env:
DBT_PROFILES_DIR: ./
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
dbt:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
- name: Run datashare model
run: |
cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -gt 0 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
dbt run-operation run_query --args "{sql: call admin.datashare.sp_grant_share_permissions('${{ env.DATABASE }}')}"
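          # run the datashare model only if it exists in this project, then re-grant share permissions on the target database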
- name: Store logs
uses: actions/upload-artifact@v3
with:
name: dbt-logs
path: |
logs
target
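
Similarly, a deployment run could be dispatched from the CLI roughly like this; the workflow file name and the example dbt_command value are assumptions for illustration:

gh workflow run dbt_run_deployment.yml -f warehouse=DBT -f dbt_command='dbt run -m "berachain_models,tag:non_realtime"'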

@@ -0,0 +1,44 @@
name: dbt_run_scheduled_non_realtime
run-name: dbt_run_scheduled_non_realtime
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "berachain_models,tag:non_realtime"
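          # runs everything in berachain_models tagged non_realtime; STREAMLINE_INVOKE_STREAMS is passed through to the models as a dbt var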

.github/workflows/dbt_test_daily.yml
@@ -0,0 +1,48 @@
name: dbt_test_daily
run-name: dbt_test_daily
on:
workflow_dispatch:
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: '0 9 * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test --exclude "berachain_models,tag:full_test" "berachain_models,tag:recent_test" "berachain_models,tag:gha_tasks" livequery_models
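          # daily tests exclude the full_test, recent_test, and gha_tasks tagged suites plus livequery_models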

.github/workflows/dbt_test_tasks.yml
@@ -0,0 +1,27 @@
name: dbt_test_tasks
run-name: dbt_test_tasks
on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_test_tasks.yml@main
secrets: inherit

@@ -1,2 +1,3 @@
workflow_name,workflow_schedule
dbt_run_streamline_chainhead,"3,33 * * * *"
dbt_run_scheduled_non_realtime,"15,45 * * * *"

@@ -0,0 +1,34 @@
{{ config (
materialized = "ephemeral"
) }}
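-- transactions in the lookback window that have no matching receipt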
WITH lookback AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
)
SELECT
DISTINCT t.block_number AS block_number
FROM
{{ ref("silver_testnet__transactions") }}
t
LEFT JOIN {{ ref("silver_testnet__receipts") }}
r USING (
block_number,
block_hash,
tx_hash
)
WHERE
r.tx_hash IS NULL
AND t.block_number >= (
SELECT
block_number
FROM
lookback
)
AND t.block_timestamp >= DATEADD('hour', -84, SYSDATE())
AND (
r._inserted_timestamp >= DATEADD('hour', -84, SYSDATE())
OR r._inserted_timestamp IS NULL)

@@ -0,0 +1,30 @@
{{ config (
materialized = "ephemeral"
) }}
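-- transactions in the lookback window that have no matching trace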
WITH lookback AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
)
SELECT
    DISTINCT tx.block_number AS block_number
FROM
{{ ref("silver_testnet__transactions") }}
tx
LEFT JOIN {{ ref("silver_testnet__traces") }}
tr
ON tx.block_number = tr.block_number
AND tx.tx_hash = tr.tx_hash
AND tr.block_timestamp >= DATEADD('hour', -84, SYSDATE())
WHERE
tx.block_timestamp >= DATEADD('hour', -84, SYSDATE())
AND tr.tx_hash IS NULL
AND tx.block_number >= (
SELECT
block_number
FROM
lookback
)

@@ -0,0 +1,40 @@
{{ config (
materialized = "ephemeral"
) }}
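-- blocks in the lookback window where transaction positions are not consecutive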
WITH lookback AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
transactions AS (
SELECT
block_number,
POSITION,
LAG(
POSITION,
1
) over (
PARTITION BY block_number
ORDER BY
POSITION ASC
) AS prev_POSITION
FROM
{{ ref("silver_testnet__transactions") }}
WHERE
block_timestamp >= DATEADD('hour', -84, SYSDATE())
AND block_number >= (
SELECT
block_number
FROM
lookback
)
)
SELECT
DISTINCT block_number AS block_number
FROM
transactions
WHERE
POSITION - prev_POSITION <> 1

tests/.gitkeep (new empty file)

@@ -0,0 +1,2 @@
-- depends_on: {{ ref('test_silver__transactions_full') }}
{{ missing_txs(ref("test_silver__receipts_full")) }}

@@ -0,0 +1,2 @@
-- depends_on: {{ ref('test_silver__transactions_full') }}
{{ missing_txs(ref("test_silver__traces_full")) }}

@@ -0,0 +1,2 @@
-- depends_on: {{ ref('test_silver__blocks_full') }}
{{ fsc_utils.tx_gaps(ref("test_silver__transactions_full")) }}

@@ -0,0 +1,2 @@
-- depends_on: {{ ref('test_silver__transactions_recent') }}
{{ recent_missing_txs(ref("test_silver__receipts_recent")) }}

@@ -0,0 +1,2 @@
-- depends_on: {{ ref('test_silver__transactions_recent') }}
{{ recent_missing_txs(ref("test_silver__traces_recent")) }}

@@ -0,0 +1,2 @@
-- depends_on: {{ ref('test_silver__blocks_recent') }}
{{ fsc_utils.recent_tx_gaps(ref("test_silver__transactions_recent")) }}