AN-5951/slack-alerts-gnosis (#199)

Authored by Sam on 2025-04-23 21:50:59 +08:00, committed by GitHub
parent b5541eda3a
commit 5d7df29366
20 changed files with 313 additions and 169 deletions

View File

@ -11,14 +11,14 @@ on:
description: Name of the workflow to perform the action on, no .yml extension
required: true
task_action:
type: choice
type: choice
description: Action to perform
required: true
options:
- SUSPEND
- RESUME
default: SUSPEND
env:
DBT_PROFILES_DIR: ./
@ -34,15 +34,20 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
with:
workflow_name: |
${{ inputs.workflow_name }}
${{ inputs.workflow_name }}
task_action: |
${{ inputs.task_action }}
${{ inputs.task_action }}
environment: workflow_prod
secrets: inherit
secrets: inherit
notify-failure:
needs: [called_workflow_template]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -21,12 +21,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_prod
steps:
@ -41,10 +39,10 @@ jobs:
run: |
pip install -r requirements.txt
dbt deps
- name: refresh ddl for datashare
run: |
cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
- name: checkout docs branch
run: |
git checkout -B docs origin/main
@ -71,4 +69,11 @@ jobs:
git commit -am "Auto-update docs"
- name: push changes to docs
run: |
git push -f --set-upstream origin docs
git push -f --set-upstream origin docs
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,8 +5,8 @@ on:
workflow_dispatch:
schedule:
# Runs “At minute 0 past every 12th hour.” (see https://crontab.guru)
- cron: '0 */12 * * *'
- cron: "0 */12 * * *"
env:
DBT_PROFILES_DIR: ./
@ -22,12 +22,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_prod
steps:
@ -49,4 +47,11 @@ jobs:
- name: Kick off decoded logs history, if there are new ABIs from users
run: |
dbt run-operation run_decoded_logs_history
dbt run-operation run_decoded_logs_history
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -7,7 +7,7 @@ on:
- "main"
inputs:
environment:
type: choice
type: choice
description: DBT Run Environment
required: true
options:
@ -15,9 +15,9 @@ on:
- prod
default: dev
warehouse:
type: choice
type: choice
description: Snowflake warehouse
required: true
required: true
options:
- DBT
- DBT_CLOUD
@ -26,9 +26,9 @@ on:
default: DBT
dbt_command:
type: string
description: 'DBT Run Command'
description: "DBT Run Command"
required: true
env:
DBT_PROFILES_DIR: ./
@ -44,12 +44,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_${{ inputs.environment }}
steps:
@ -66,4 +64,11 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
${{ inputs.dbt_command }}
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -1,69 +0,0 @@
name: dbt_run_deployment
run-name: dbt_run_deployment

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
        default: DBT
      dbt_command:
        type: string
        description: 'DBT Run Command'
        required: true

env:
  DBT_PROFILES_DIR: ./
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}
      - name: Run datashare model
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
          dbt run-operation run_query --args "{sql: call admin.datashare.sp_grant_share_permissions('${{ env.DATABASE }}')}"
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target

View File

@ -5,8 +5,8 @@ on:
workflow_dispatch:
schedule:
# Runs "at 6:30 UTC" (see https://crontab.guru)
- cron: '30 6 * * 1'
- cron: "30 6 * * 1"
env:
DBT_PROFILES_DIR: ./
@ -22,12 +22,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
run_dbt_jobs_refresh:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_prod
steps:
@ -44,4 +42,41 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run-operation run_sp_create_prod_clone
dbt run-operation run_sp_create_prod_clone
notify-failure:
needs: [run_dbt_jobs_refresh]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
run_dbt_jobs_udfs:
runs-on: ubuntu-latest
needs: run_dbt_jobs_refresh
environment:
name: workflow_dev
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run Recreate UDFs
run: |
dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
notify-failure2:
needs: [run_dbt_jobs_udfs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,8 +5,8 @@ on:
workflow_dispatch:
schedule:
# Runs “At 18:00 on day-of-month 1.” (see https://crontab.guru)
- cron: '0 18 1 * *'
- cron: "0 18 1 * *"
env:
DBT_PROFILES_DIR: ./
@ -22,12 +22,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_prod_2xl
steps:
@ -45,4 +43,10 @@ jobs:
- name: Run DBT Jobs
run: |
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "gnosis_models,tag:observability"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,8 +5,8 @@ on:
workflow_dispatch:
schedule:
# Runs at 04:55 on Wednesday (see https://crontab.guru)
- cron: '55 4 * * 3'
- cron: "55 4 * * 3"
env:
DBT_PROFILES_DIR: ./
@ -22,12 +22,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_prod
steps:
@ -44,4 +42,11 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "gnosis_models,tag:heal" --vars '{"HEAL_MODEL":True}'
dbt run -m "gnosis_models,tag:heal" --vars '{"HEAL_MODEL":True}'
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,8 +5,8 @@ on:
workflow_dispatch:
schedule:
# Runs at minute 40 every Monday (see https://crontab.guru)
- cron: '40 0 * * 1'
- cron: "40 0 * * 1"
env:
DBT_PROFILES_DIR: ./
@ -22,12 +22,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_prod
steps:
@ -48,11 +46,18 @@ jobs:
run: |
reorg_model_list=$(dbt list --select "gnosis_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT
- name: Execute block_reorg macro
run: |
dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '169'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log
- name: Execute decoded_logs_cleanup macro
run: |
dbt run-operation decoded_logs_cleanup
dbt run-operation decoded_logs_cleanup
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,7 +5,7 @@ on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
@ -21,12 +21,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_prod
steps:
@ -43,4 +41,11 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "gnosis_models,tag:curated"
dbt run -m "gnosis_models,tag:curated"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,7 +5,7 @@ on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
@ -21,12 +21,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_prod
steps:
@ -43,4 +41,11 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "gnosis_models,tag:non_realtime" "gnosis_models,tag:streamline_decoded_logs_complete" "gnosis_models,tag:streamline_decoded_logs_realtime"
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "gnosis_models,tag:non_realtime" "gnosis_models,tag:streamline_decoded_logs_complete" "gnosis_models,tag:streamline_decoded_logs_realtime"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -21,8 +21,6 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
@ -44,7 +42,14 @@ jobs:
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "gnosis_models,tag:streamline_core_complete" "gnosis_models,tag:streamline_core_realtime" "gnosis_models,tag:streamline_core_complete_receipts" "gnosis_models,tag:streamline_core_realtime_receipts" "gnosis_models,tag:streamline_core_complete_confirm_blocks" "gnosis_models,tag:streamline_core_realtime_confirm_blocks"
- name: Run Chainhead Tests
run: |
dbt test -m "gnosis_models,tag:chainhead"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -46,4 +46,11 @@ jobs:
- name: Decode historical logs
run: |
dbt run-operation decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
dbt run-operation decoded_logs_history --vars '{"STREAMLINE_INVOKE_STREAMS":True}'
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,7 +5,7 @@ on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
@ -21,12 +21,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_prod
steps:
@ -43,4 +41,11 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "gnosis_models,tag:decoded_logs"
dbt run -m "gnosis_models,tag:decoded_logs"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -7,7 +7,7 @@ on:
- "main"
inputs:
environment:
type: choice
type: choice
description: DBT Run Environment
required: true
options:
@ -16,9 +16,9 @@ on:
- prod_backfill
default: dev
warehouse:
type: choice
type: choice
description: Snowflake warehouse
required: true
required: true
options:
- DBT
- DBT_CLOUD
@ -26,12 +26,12 @@ on:
default: DBT
dbt_command:
type: choice
description: 'DBT Run Command'
description: "DBT Run Command"
required: true
options:
- dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "gnosis_models,tag:streamline_core_complete" "gnosis_models,tag:streamline_core_history" "gnosis_models,tag:streamline_core_complete_receipts" "gnosis_models,tag:streamline_core_history_receipts" "gnosis_models,tag:streamline_core_complete_confirm_blocks" "gnosis_models,tag:streamline_core_history_confirm_blocks"
- dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "gnosis_models,tag:streamline_decoded_logs_complete" "gnosis_models,tag:streamline_decoded_logs_history"
env:
DBT_PROFILES_DIR: ./
@ -47,12 +47,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_${{ inputs.environment }}
steps:
@ -69,4 +67,11 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
${{ inputs.dbt_command }}
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,8 +5,8 @@ on:
workflow_dispatch:
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: '0 9 * * *'
- cron: "0 9 * * *"
env:
DBT_PROFILES_DIR: ./
@ -22,12 +22,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_test
steps:
@ -45,4 +43,10 @@ jobs:
- name: Run DBT Jobs
run: |
dbt test --exclude "gnosis_models,tag:full_test" "gnosis_models,tag:recent_test" "gnosis_models,tag:gha_tasks" livequery_models
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,7 +5,7 @@ on:
workflow_dispatch:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
@ -21,12 +21,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_test
steps:
@ -44,4 +42,11 @@ jobs:
- name: Run DBT Jobs
run: |
dbt run -m "gnosis_models,tag:observability"
dbt test -m "gnosis_models,tag:recent_test"
dbt test -m "gnosis_models,tag:recent_test"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,8 +5,8 @@ on:
workflow_dispatch:
schedule:
# Runs “At 18:00 on the 28th of the month.” (see https://crontab.guru)
- cron: '0 18 28 * *'
- cron: "0 18 28 * *"
env:
DBT_PROFILES_DIR: ./
@ -22,12 +22,10 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
environment:
name: workflow_test
steps:
@ -45,6 +43,10 @@ jobs:
- name: Run DBT Jobs
run: |
dbt test -m "gnosis_models,tag:full_test"
notify-failure:
needs: [run_dbt_jobs]
if: failure()
uses: ./.github/workflows/slack_notify.yml
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

.github/workflows/slack_notify.yml (new file, 27 lines)
View File

@ -0,0 +1,27 @@
name: Slack Notification

on:
  workflow_call:
    secrets:
      SLACK_WEBHOOK_URL:
        required: true

jobs:
  notify:
    runs-on: ubuntu-latest
    environment: workflow_prod
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: pip install requests
      - name: Send Slack notification
        run: python python/slack_alert.py
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

python/slack_alert.py (new file, 74 lines)
View File

@ -0,0 +1,74 @@
import requests
import os
import sys


def create_message():
    """Creates a simple failure notification message with repo, workflow name, and URL"""
    # Get GitHub environment variables
    repository = os.environ.get('GITHUB_REPOSITORY', 'Unknown repository')
    repo_name = repository.split('/')[-1] if '/' in repository else repository
    workflow_name = os.environ.get('GITHUB_WORKFLOW', 'Unknown workflow')
    run_id = os.environ.get('GITHUB_RUN_ID', '')
    server_url = os.environ.get('GITHUB_SERVER_URL', 'https://github.com')

    # Build the workflow URL
    workflow_url = f"{server_url}/{repository}/actions/runs/{run_id}"

    message_body = {
        "text": f"Failure in {repo_name}",
        "attachments": [
            {
                "color": "#f44336",  # Red color for failures
                "fields": [
                    {
                        "title": "Repository",
                        "value": repository,
                        "short": True
                    },
                    {
                        "title": "Workflow",
                        "value": workflow_name,
                        "short": True
                    }
                ],
                "actions": [
                    {
                        "type": "button",
                        "text": "View Workflow Run",
                        "style": "primary",
                        "url": workflow_url
                    }
                ],
                "footer": "GitHub Actions"
            }
        ]
    }
    return message_body


def send_alert(webhook_url):
    """Sends a failure notification to Slack"""
    message = create_message()
    try:
        response = requests.post(webhook_url, json=message)
        if response.status_code == 200:
            print("Successfully sent Slack notification")
        else:
            print(f"Failed to send Slack notification: {response.status_code} {response.text}")
            sys.exit(1)
    except Exception as e:
        print(f"Error sending Slack notification: {str(e)}")
        sys.exit(1)


if __name__ == '__main__':
    webhook_url = os.environ.get("SLACK_WEBHOOK_URL")
    if not webhook_url:
        print("ERROR: SLACK_WEBHOOK_URL environment variable is required")
        sys.exit(1)
    send_alert(webhook_url)
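
For local verification of the new alert script, a minimal smoke test might look like the sketch below. It only builds and prints the Slack payload rather than posting it; the repository name, workflow name, and run ID are stand-ins for the values GitHub Actions injects at runtime, and requests must be installed because the script imports it.

# Hypothetical local smoke test for python/slack_alert.py: build the payload, do not send it.
# The GITHUB_* values below are stand-ins for variables provided by Actions in CI.
import importlib.util
import json
import os

os.environ.setdefault("GITHUB_REPOSITORY", "FlipsideCrypto/gnosis-models")  # stand-in
os.environ.setdefault("GITHUB_WORKFLOW", "dbt_run_scheduled_main")          # stand-in
os.environ.setdefault("GITHUB_RUN_ID", "1234567890")                        # stand-in

# Load the script as a module; the __main__ guard keeps send_alert() from firing.
spec = importlib.util.spec_from_file_location("slack_alert", "python/slack_alert.py")
slack_alert = importlib.util.module_from_spec(spec)
spec.loader.exec_module(slack_alert)

print(json.dumps(slack_alert.create_message(), indent=2))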