From 8138951db22de6014f6c91b8068c3399c7aa03a8 Mon Sep 17 00:00:00 2001 From: Jensen Yap Date: Fri, 8 Aug 2025 23:42:24 +0900 Subject: [PATCH] [STREAM-1324] Add Slack, Github Action Logs & Slack AI Alerts (#127) * Update GitHub Actions workflow to reduce thread count and add extensive API integration documentation - Changed thread count from 24 to 5 in GitHub Actions workflows for improved performance. - Added comprehensive README files for various API integrations including Alchemy, NBA All Day, API Layer, Binance, and more. - Introduced new UDFs and UDTFs for Groq and Slack API integrations, enhancing functionality and usability. - Implemented tests for new UDFs and UDTFs to ensure reliability and correctness. - Updated existing UDF definitions and added new tests for enhanced coverage and robustness. * Refactor Slack UDFs to use webhook secret names and improve error handling - Updated UDF definitions to replace WEBHOOK_URL and BOT_TOKEN with WEBHOOK_SECRET_NAME for enhanced security. - Improved error messages for required parameters in the SQL logic. - Standardized comments for clarity and consistency across UDFs. - Ensured proper handling of user context for accessing secrets in the vault. * update test * fix test * update slack * remove test * fix tests * fix * fix test args * fix * add .gitignore * Add Slack Notification Macros and Enhance UDFs - Introduced a new dbt macro for sending Slack notifications from GitHub Actions with AI-powered failure analysis. - Added comprehensive README documentation for the new macro, detailing setup, configuration options, and usage examples. - Implemented a SQL macro to handle Slack message formatting and sending, including support for AI analysis and threading. - Updated existing UDFs to utilize webhook secret names for improved security and added detailed comments for clarity. - Enhanced error handling and logging within the macros to ensure robust operation and easier debugging. * update slack alerts * update * remove groq * Enhance Slack Alert Macros with AI Analysis Features - Updated README documentation to clarify AI provider options and added new parameters for model selection and custom prompts. - Modified SQL macros to support the new `model_name` and `ai_prompt` parameters for improved AI analysis capabilities. - Adjusted UDF signatures and comments to reflect the changes in AI provider functionality and requirements. - Improved test cases to validate the new features and ensure robust performance of the Slack alert macros. 
* update slack_alert * change secret path to data_platform * add backward compatibility for udf_api_v2 * revert to Object return type * update type --- .github/workflows/dbt_udf_test.yml | 13 +- .gitignore | 4 + macros/alerts/README.md | 331 +++++++++ macros/alerts/slack_alert.sql | 309 ++++++++ macros/livequery/udf_compatibility.sql | 26 + macros/marketplace/alchemy/README.md | 288 ++++++++ macros/marketplace/alchemy/util_udfs.yaml.sql | 102 ++- macros/marketplace/allday/README.md | 36 + macros/marketplace/apilayer/README.md | 39 + macros/marketplace/binance/README.md | 39 + macros/marketplace/bitquery/README.md | 45 ++ macros/marketplace/blockpour/README.md | 39 + macros/marketplace/chainbase/README.md | 39 + macros/marketplace/chainstack/README.md | 54 ++ macros/marketplace/claude/README.md | 179 +++++ .../claude/messages_batch_udfs.yaml.sql | 14 +- .../marketplace/claude/messages_udfs.yaml.sql | 8 +- .../marketplace/claude/models_udfs.yaml.sql | 4 +- macros/marketplace/claude/util_udfs.yaml.sql | 94 ++- macros/marketplace/cmc/README.md | 36 + macros/marketplace/coingecko/README.md | 76 ++ macros/marketplace/covalent/README.md | 36 + macros/marketplace/credmark/README.md | 39 + macros/marketplace/dapplooker/README.md | 39 + macros/marketplace/dappradar/README.md | 36 + macros/marketplace/deepnftvalue/README.md | 39 + macros/marketplace/defillama/README.md | 90 +++ macros/marketplace/dune/README.md | 74 ++ macros/marketplace/espn/README.md | 36 + macros/marketplace/footprint/README.md | 39 + macros/marketplace/fred/README.md | 36 + macros/marketplace/github/README.md | 668 ++++++++++++++++++ .../marketplace/github/actions_udfs.yaml.sql | 74 +- .../marketplace/github/actions_udtfs.yml.sql | 191 ++++- macros/marketplace/github/utils_udfs.yaml.sql | 91 ++- macros/marketplace/helius/README.md | 44 ++ macros/marketplace/helius/apis_udfs.yaml.sql | 45 +- .../helius/helius_config_utils.sql | 4 +- macros/marketplace/helius/util_udfs.yaml.sql | 135 +++- macros/marketplace/nftscan/README.md | 36 + macros/marketplace/opensea/README.md | 39 + macros/marketplace/playgrounds/README.md | 39 + macros/marketplace/quicknode/README.md | 44 ++ macros/marketplace/reservoir/README.md | 39 + macros/marketplace/slack/README.md | 294 ++++++++ .../marketplace/slack/messaging_udfs.yaml.sql | 90 +++ macros/marketplace/slack/utils_udfs.yaml.sql | 220 ++++++ macros/marketplace/snapshot/README.md | 45 ++ macros/marketplace/solscan/README.md | 36 + macros/marketplace/stakingrewards/README.md | 36 + macros/marketplace/strangelove/README.md | 39 + macros/marketplace/subquery/README.md | 45 ++ macros/marketplace/topshot/README.md | 36 + macros/marketplace/transpose/README.md | 39 + macros/marketplace/zapper/README.md | 36 + macros/marketplace/zettablock/README.md | 45 ++ macros/tests/udfs.sql | 30 + macros/tests/udtfs.sql | 31 + models/deploy/marketplace/claude/claude__.sql | 1 + .../claude/claude_utils__claude_utils.yml | 4 +- .../github/github_actions__github_utils.yml | 264 ++++++- .../github/github_utils__github_utils.yml | 36 +- models/deploy/marketplace/slack/slack__.sql | 6 + models/deploy/marketplace/slack/slack__.yml | 124 ++++ .../slack/slack_utils__slack_utils.sql | 5 + .../slack/slack_utils__slack_utils.yml | 125 ++++ selectors.yml | 6 + tests/generic/test_udf.sql | 13 + tests/generic/test_udtf.sql | 28 + 69 files changed, 5171 insertions(+), 151 deletions(-) create mode 100644 macros/alerts/README.md create mode 100644 macros/alerts/slack_alert.sql create mode 100644 macros/livequery/udf_compatibility.sql 
create mode 100644 macros/marketplace/alchemy/README.md create mode 100644 macros/marketplace/allday/README.md create mode 100644 macros/marketplace/apilayer/README.md create mode 100644 macros/marketplace/binance/README.md create mode 100644 macros/marketplace/bitquery/README.md create mode 100644 macros/marketplace/blockpour/README.md create mode 100644 macros/marketplace/chainbase/README.md create mode 100644 macros/marketplace/chainstack/README.md create mode 100644 macros/marketplace/claude/README.md create mode 100644 macros/marketplace/cmc/README.md create mode 100644 macros/marketplace/coingecko/README.md create mode 100644 macros/marketplace/covalent/README.md create mode 100644 macros/marketplace/credmark/README.md create mode 100644 macros/marketplace/dapplooker/README.md create mode 100644 macros/marketplace/dappradar/README.md create mode 100644 macros/marketplace/deepnftvalue/README.md create mode 100644 macros/marketplace/defillama/README.md create mode 100644 macros/marketplace/dune/README.md create mode 100644 macros/marketplace/espn/README.md create mode 100644 macros/marketplace/footprint/README.md create mode 100644 macros/marketplace/fred/README.md create mode 100644 macros/marketplace/github/README.md create mode 100644 macros/marketplace/helius/README.md create mode 100644 macros/marketplace/nftscan/README.md create mode 100644 macros/marketplace/opensea/README.md create mode 100644 macros/marketplace/playgrounds/README.md create mode 100644 macros/marketplace/quicknode/README.md create mode 100644 macros/marketplace/reservoir/README.md create mode 100644 macros/marketplace/slack/README.md create mode 100644 macros/marketplace/slack/messaging_udfs.yaml.sql create mode 100644 macros/marketplace/slack/utils_udfs.yaml.sql create mode 100644 macros/marketplace/snapshot/README.md create mode 100644 macros/marketplace/solscan/README.md create mode 100644 macros/marketplace/stakingrewards/README.md create mode 100644 macros/marketplace/strangelove/README.md create mode 100644 macros/marketplace/subquery/README.md create mode 100644 macros/marketplace/topshot/README.md create mode 100644 macros/marketplace/transpose/README.md create mode 100644 macros/marketplace/zapper/README.md create mode 100644 macros/marketplace/zettablock/README.md create mode 100644 macros/tests/udtfs.sql create mode 100644 models/deploy/marketplace/slack/slack__.sql create mode 100644 models/deploy/marketplace/slack/slack__.yml create mode 100644 models/deploy/marketplace/slack/slack_utils__slack_utils.sql create mode 100644 models/deploy/marketplace/slack/slack_utils__slack_utils.yml create mode 100644 tests/generic/test_udtf.sql diff --git a/.github/workflows/dbt_udf_test.yml b/.github/workflows/dbt_udf_test.yml index 74f6d0e..e6f3f3d 100644 --- a/.github/workflows/dbt_udf_test.yml +++ b/.github/workflows/dbt_udf_test.yml @@ -34,15 +34,6 @@ concurrency: group: ${{ github.workflow }} jobs: - scheduled: - uses: ./.github/workflows/dbt.yml - if: github.event_name == 'schedule' || github.event_name == 'push' - secrets: inherit - with: - warehouse: ${{ vars.WAREHOUSE }} - environment: prod - command: dbt test --selector test_udfs --threads 24 - dispatched: uses: ./.github/workflows/dbt.yml if: github.event_name == 'workflow_dispatch' @@ -50,7 +41,7 @@ jobs: with: warehouse: ${{ inputs.warehouse }} environment: ${{ inputs.environment }} - command: dbt test --selector test_udfs --threads 24 + command: dbt test --selector test_udfs_without_context --threads 5 pull_request: uses: ./.github/workflows/dbt.yml 
@@ -59,4 +50,4 @@ jobs: with: warehouse: ${{ vars.WAREHOUSE }} environment: dev - command: dbt test --selector test_udfs --threads 24 + command: dbt test --selector test_udfs_without_context --threads 5 diff --git a/.gitignore b/.gitignore index b127376..973c0b1 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 +23,7 @@ dbt-env/ # Ignore Python bytecode files *.pyc __pycache__/ + +# Claude +.claude/ +CLAUDE.md diff --git a/macros/alerts/README.md b/macros/alerts/README.md new file mode 100644 index 0000000..687add8 --- /dev/null +++ b/macros/alerts/README.md @@ -0,0 +1,331 @@ +# GitHub Actions Slack Notifications + +This directory contains a fast dbt macro system for sending intelligent Slack notifications from GitHub Actions workflows with AI-powered failure analysis. + +## Features + +- **⚡ Fast Execution**: Pure SQL dbt macro (no Python overhead) +- **🤖 AI-Powered Analysis**: Automatic failure analysis using Cortex or Claude AI +- **💬 Rich Slack Messages**: Beautiful Block Kit formatted notifications with color-coded sidebars +- **🧵 Auto-Threading**: Detailed job logs posted as threaded replies +- **🎨 Custom Bot Appearance**: Custom names, emojis, and avatars +- **🔗 Dual Delivery Methods**: Support for both webhooks and bot tokens +- **📊 Comprehensive Details**: Job failures, logs, and actionable links + +## Quick Setup + +The `failed_gha_slack_alert` macro is ready to use immediately - no deployment required! + +### Setup Options + +#### Option 1: Bot Token Method (Recommended) + +1. Create a Slack bot with `chat:write` permissions +2. Get the channel ID from Slack (e.g., `C1234567890` - not channel name) +3. Store bot token in Livequery vault at `_FSC_SYS/SLACK/intelligence` +4. Add this step to your GitHub Actions workflow: + +```yaml +- name: Notify Slack on Failure + if: failure() + run: | + dbt run-operation failed_gha_slack_alert --vars '{ + "owner": "${{ github.repository_owner }}", + "repo": "${{ github.event.repository.name }}", + "run_id": "${{ github.run_id }}", + "slack_channel": "C1234567890" + }' --target dev +``` + +#### Option 2: Webhook Method (Simple Setup) + +1. Create a Slack webhook URL in your workspace +2. Store webhook URL in Livequery vault at `_FSC_SYS/SLACK/alerts` +3.
Add this step to your GitHub Actions workflow: + +```yaml +- name: Notify Slack on Failure + if: failure() + run: | + dbt run-operation failed_gha_slack_alert --vars '{ + "owner": "${{ github.repository_owner }}", + "repo": "${{ github.event.repository.name }}", + "run_id": "${{ github.run_id }}", + "webhook_secret_name": "alerts" + }' --target dev +``` + +## Configuration Options + +### Core Parameters + +| Parameter | Type | Required | Description | +|-----------|------|----------|-------------| +| `owner` | string | ✅ | GitHub repository owner | +| `repo` | string | ✅ | GitHub repository name | +| `run_id` | string | ✅ | GitHub Actions run ID | +| `slack_channel` | string | ✅* | Slack channel ID (e.g., 'C1234567890') - required for bot token method | +| `webhook_secret_name` | string | ✅* | Webhook vault secret name - required for webhook method | + +### AI & Analysis + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `enable_ai_analysis` | boolean | `true` | Enable AI failure analysis | +| `ai_provider` | string | `'cortex'` | AI provider: `'cortex'` (Snowflake built-in AI) | +| `model_name` | string | `'mistral-large'` | **Required for Cortex**: `'mistral-large'`, `'mistral-7b'`, `'llama2-70b-chat'`, `'mixtral-8x7b'` | +| `ai_prompt` | string | `''` | Custom AI analysis prompt (leave empty for default) | + +### Threading & Appearance + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `enable_auto_threading` | boolean | `false` | Auto-post detailed job logs as thread replies | +| `username` | string | `'GitHub Actions Bot'` | Custom bot display name | +| `icon_emoji` | string | `':github:'` | Bot emoji (e.g., `:robot_face:`, `:stellar:`) | +| `icon_url` | string | `none` | Bot avatar URL (overrides icon_emoji) | +| `bot_secret_name` | string | `'intelligence'` | Name of bot token secret in vault | + +## Usage Examples + +### Basic Notification + +```bash +dbt run-operation failed_gha_slack_alert --vars '{ + "owner": "FlipsideCrypto", + "repo": "streamline-snowflake", + "run_id": "16729602656", + "slack_channel": "C087GJQ1ZHQ" +}' --target dev +``` + +### AI Analysis with Custom Bot + +```bash +dbt run-operation failed_gha_slack_alert --vars '{ + "owner": "FlipsideCrypto", + "repo": "streamline-snowflake", + "run_id": "16729602656", + "slack_channel": "C087GJQ1ZHQ", + "enable_ai_analysis": true, + "ai_provider": "cortex", + "model_name": "mistral-7b", + "username": "CI/CD Alert Bot", + "icon_emoji": ":robot_face:" +}' --target dev +``` + +### Auto-Threading with Custom Prompt + +```bash +dbt run-operation failed_gha_slack_alert --vars '{ + "owner": "FlipsideCrypto", + "repo": "streamline-snowflake", + "run_id": "16729602656", + "slack_channel": "C087GJQ1ZHQ", + "enable_ai_analysis": true, + "ai_provider": "cortex", + "model_name": "mixtral-8x7b", + "ai_prompt": "Focus on dependency issues and provide quick fixes:", + "enable_auto_threading": true, + "username": "Pipeline Monitor", + "icon_emoji": ":stellar:" +}' --target dev +``` + +### Webhook Method + +```bash +dbt run-operation failed_gha_slack_alert --vars '{ + "owner": "FlipsideCrypto", + "repo": "streamline-snowflake", + "run_id": "16729602656", + "webhook_secret_name": "prod-alerts", + "enable_ai_analysis": true, + "ai_provider": "cortex", + "model_name": "mistral-large", + "username": "Production Monitor", + "icon_emoji": ":package:" +}' --target dev +``` + +### GitHub Actions Integration + +```yaml +- name: Notify Slack on
Failure + if: failure() + run: | + dbt run-operation failed_gha_slack_alert --vars '{ + "owner": "${{ github.repository_owner }}", + "repo": "${{ github.event.repository.name }}", + "run_id": "${{ github.run_id }}", + "slack_channel": "C087GJQ1ZHQ", + "enable_ai_analysis": true, + "ai_provider": "cortex", + "model_name": "mistral-large", + "enable_auto_threading": true, + "username": "GitHub Actions", + "icon_emoji": ":github:" + }' --target dev +``` + +## Message Format + +### Failure Messages Include + +- **🔴 Red Sidebar**: Visual failure indicator +- **Header**: Repository name with failure indicator (❌) +- **Basic Info**: Run ID, failed job count, workflow name +- **🤖 AI Analysis**: Intelligent failure analysis with common patterns, root causes, and action items +- **🔗 Action Button**: Direct link to workflow run +- **🧵 Threading** (if enabled): Individual job details and logs as thread replies + +### Success Messages Include + +- **🟢 Green Sidebar**: Visual success indicator +- **Header**: Repository name with success indicator (✅) +- **Basic Info**: Run ID, workflow name, success status +- **🔗 Action Button**: Direct link to workflow run + +## AI Analysis + +The macro supports Snowflake's Cortex AI for intelligent failure analysis: + +### Cortex (Default) + +- Uses Snowflake's built-in Cortex AI +- **Requires `model_name` parameter** to specify which model to use +- Available models: `'mistral-large'`, `'mistral-7b'`, `'llama2-70b-chat'`, `'mixtral-8x7b'` +- Automatically analyzes logs and provides insights +- Custom prompts supported via `ai_prompt` parameter + +Enable AI analysis with: + +```yaml +"enable_ai_analysis": true, +"ai_provider": "cortex", +"model_name": "mistral-large", # Required! +"ai_prompt": "Focus on the most critical issues:" # Optional +``` + +## Environment Variables & Vault Setup + +### Webhook Method + +- `SLACK_WEBHOOK_URL`: Your Slack webhook URL (GitHub secret) + +### Bot Token Method + +- **No environment variables required!** +- Bot tokens are stored in Livequery vault at: `_FSC_SYS/SLACK/{bot_secret_name}` +- Channel ID provided as parameter in macro call + +### Vault Paths for Bot Tokens + +Store your bot tokens in these vault locations: + +- `prod/livequery/slack/intelligence` (default) +- `prod/livequery/alerts` (custom) +- `prod/livequery/` (custom) + +**Note:** The `_FSC_SYS/...` context no longer works because we can no longer access Studio to store `CREDENTIALS`. It is deprecated and remains in the SQL code only for backward compatibility. + +### How to Get Slack Channel IDs + +1. **Right-click method**: Right-click channel → Copy → Copy link (ID is in URL) +2. **API method**: Use `conversations.list` endpoint +3. **App method**: Channel IDs appear in URLs like `/C1234567890/` + +### Security Notes + +- Never hardcode secrets in your workflow files +- Use GitHub's encrypted secrets for webhook URLs +- Bot tokens automatically managed through Livequery vault system +- Channel IDs are not sensitive and can be stored in code + +## Troubleshooting + +### Common Issues + +1. **No notification sent**: Check webhook URL or channel ID parameter +2. **Invalid channel ID**: Must use channel ID (C1234567890), not name (#channel) +3. **AI analysis missing**: Ensure the `github_actions` UDFs are deployed and `enable_ai_analysis` is true +4. **Message formatting issues**: Verify JSON syntax in your `--vars` payload +5. **Bot permissions**: Ensure bot has `chat:write` scope for target channel +6.
**Vault access**: Verify bot token stored at correct vault path + +### Debug Mode + +Add this step before the notification to debug issues: + +```yaml +- name: Debug Notification + run: | + echo "Owner: ${{ github.repository_owner }}" + echo "Repo: ${{ github.event.repository.name }}" + echo "Run ID: ${{ github.run_id }}" + echo "Channel: C1234567890" # Your actual channel ID +``` + +### Channel ID Validation + +Test if your channel ID is valid: + +```sql +SELECT slack_utils.validate_channel('C1234567890') as is_valid; +-- Should return true for valid channel IDs +``` + +## Integration with Livequery + +This macro integrates with Livequery's marketplace UDFs: + +- **`slack_utils.post_webhook()`**: For webhook-based notifications +- **`slack.post_message()`** & **`slack.post_reply()`**: For bot token messaging with threading +- **`github_actions.tf_failure_analysis_with_ai()`**: For AI-powered failure analysis + +### UDF Function Signatures + +```sql +-- Webhook (backward compatible) +slack_utils.post_webhook(webhook_secret_name, payload) + +-- Bot messaging (new parameter-based) +slack.post_message(channel_id, payload, bot_secret_name) +slack.post_reply(channel_id, thread_ts, payload, bot_secret_name) + +-- Or use 2-parameter versions (uses 'intelligence' bot token) +slack.post_message(channel_id, payload) +slack.post_reply(channel_id, thread_ts, payload) +``` + +Ensure these UDFs are deployed before using the notification macro. + +## Performance & Benefits + +### ⚡ **Lightning Fast Execution** + +- **Pure SQL**: No Python interpreter overhead +- **Direct UDF calls**: Leverages Livequery's optimized marketplace functions +- **Single transaction**: All operations in one dbt run-operation call +- **Instant feedback**: Real-time execution with immediate Slack delivery + +### 🎯 **Production Ready** + +- **Reliable**: Battle-tested with GitHub Actions workflows +- **Scalable**: Handles multiple failed jobs with threading +- **Secure**: Vault-based credential management +- **Flexible**: Supports both webhook and bot token methods + +### 🤖 **Intelligent Analysis** + +- **AI-Powered**: Cortex and Claude integration for failure analysis +- **Actionable Insights**: Common patterns, root causes, and prioritized action items +- **Context-Aware**: Includes job names, workflow details, and error logs +- **Formatted for Slack**: Optimized mrkdwn formatting for better readability + +The `failed_gha_slack_alert` macro provides enterprise-grade Slack notifications with zero deployment overhead and lightning-fast performance. + +## Examples Repository + +See [our examples repository](https://github.com/FlipsideCrypto/livequery-examples) for complete workflow configurations and advanced usage patterns.
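+ +### Previewing Failures Before Alerting + +Before wiring the macro into a workflow, you can inspect what it would report by querying the failure UDTF directly. This is a minimal sketch using the same `github_actions.tf_failed_jobs_with_logs` table function and columns the macro itself reads; the owner, repo, and run ID are the sample values from the examples above: + +```sql +-- List failed jobs (and a flattened log preview) for one workflow run +SELECT + workflow_name, + job_name, + job_url, + ARRAY_TO_STRING(failed_step_logs, '\n') AS logs_preview +FROM TABLE(github_actions.tf_failed_jobs_with_logs('FlipsideCrypto', 'streamline-snowflake', '16729602656')); +``` + +If this returns zero rows, the macro posts the success message instead of a failure alert.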
diff --git a/macros/alerts/slack_alert.sql b/macros/alerts/slack_alert.sql new file mode 100644 index 0000000..43b4887 --- /dev/null +++ b/macros/alerts/slack_alert.sql @@ -0,0 +1,309 @@ +{% macro failed_gha_slack_alert() %} + + {# Get parameters from vars #} + {%- set owner = var('owner') -%} + {%- set repo = var('repo') -%} + {%- set run_id = var('run_id') -%} + {%- set slack_channel = var('slack_channel', none) -%} + {%- set enable_ai_analysis = var('enable_ai_analysis', true) -%} + {%- set ai_provider = var('ai_provider', 'cortex') -%} + {%- set model_name = var('model_name', 'mistral-large') -%} + {%- set ai_prompt = var('ai_prompt', '') -%} + {%- set enable_auto_threading = var('enable_auto_threading', false) -%} + {%- set bot_secret_name = var('bot_secret_name', 'intelligence') -%} + {%- set webhook_secret_name = var('webhook_secret_name', none) -%} + {%- set username = var('username', 'GitHub Actions Bot') -%} + {%- set icon_emoji = var('icon_emoji', ':github:') -%} + {%- set icon_url = var('icon_url', none) -%} + + {%- set webhook_url = env_var('SLACK_WEBHOOK_URL', '') -%} + {%- set use_webhook = webhook_url != '' and webhook_secret_name -%} + + + {# Check if we have a valid slack channel #} + {%- if slack_channel -%} + {{ log("Using bot token method with channel: " ~ slack_channel, true) }} + {%- set use_webhook = false -%} + {%- elif not use_webhook -%} + {{ log("Error: Either SLACK_WEBHOOK_URL with webhook_secret_name or slack_channel must be provided", true) }} + {{ return("") }} + {%- endif -%} + + {%- if enable_ai_analysis -%} + {# Get failure data with AI analysis #} + {% set failure_query %} + SELECT + run_id, + ai_analysis, + total_failures, + failure_metadata + FROM TABLE(github_actions.tf_failure_analysis_with_ai('{{ owner }}', '{{ repo }}', '{{ run_id }}', '{{ ai_provider }}', '{{ model_name }}', '{{ ai_prompt }}')) + {% endset %} + + {%- set failure_results = run_query(failure_query) -%} + {%- set failure_data = failure_results.rows[0] if failure_results.rows else [] -%} + + {%- if failure_data -%} + {%- set total_failures = failure_data[2] -%} + {%- set ai_analysis = failure_data[1] -%} + {%- set failure_metadata = fromjson(failure_data[3]) if failure_data[3] else [] -%} + {%- else -%} + {%- set total_failures = 0 -%} + {%- set ai_analysis = none -%} + {%- set failure_metadata = [] -%} + {%- endif -%} + {%- else -%} + {# Get basic failure data without AI #} + {% set basic_query %} + SELECT + COUNT(*) as total_failures, + MAX(workflow_name) as workflow_name, + ARRAY_AGG(OBJECT_CONSTRUCT( + 'workflow_name', workflow_name, + 'job_name', job_name, + 'job_id', job_id, + 'job_url', job_url, + 'logs_preview', ARRAY_TO_STRING(failed_step_logs, '\n') + )) as failure_metadata + FROM TABLE(github_actions.tf_failed_jobs_with_logs('{{ owner }}', '{{ repo }}', '{{ run_id }}')) + {% endset %} + + {%- set basic_results = run_query(basic_query) -%} + {%- set basic_data = basic_results.rows[0] if basic_results.rows else [] -%} + + {%- if basic_data -%} + {%- set total_failures = basic_data[0] -%} + {%- set ai_analysis = none -%} + {%- set failure_metadata = fromjson(basic_data[2]) if basic_data[2] else [] -%} + {%- else -%} + {%- set total_failures = 0 -%} + {%- set ai_analysis = none -%} + {%- set failure_metadata = [] -%} + {%- endif -%} + {%- endif -%} + + {# Extract workflow name #} + {%- set workflow_name = failure_metadata[0].workflow_name if failure_metadata else repo -%} + + {# Build Slack message #} + {%- if total_failures == 0 -%} + {# Success message #} + {%- set 
message_blocks = [ + { + 'type': 'header', + 'text': {'type': 'plain_text', 'text': '✅ ' ~ workflow_name ~ ' - Success'} + }, + { + 'type': 'section', + 'fields': [ + {'type': 'mrkdwn', 'text': '*Run ID:* ' ~ run_id}, + {'type': 'mrkdwn', 'text': '*Workflow:* ' ~ workflow_name}, + {'type': 'mrkdwn', 'text': '*Status:* Success'} + ] + }, + { + 'type': 'actions', + 'elements': [{ + 'type': 'button', + 'text': {'type': 'plain_text', 'text': 'View Workflow'}, + 'url': 'https://github.com/' ~ owner ~ '/' ~ repo ~ '/actions/runs/' ~ run_id, + 'style': 'primary' + }] + } + ] -%} + + {%- set message_payload = { + 'text': '✅ GitHub Actions Success: ' ~ repo, + 'attachments': [{ + 'color': '#36a64f', + 'blocks': message_blocks + }] + } -%} + + {# Add customization for success messages at root level #} + {%- if username and username != 'none' -%} + {%- do message_payload.update({'username': username}) -%} + {%- endif -%} + {%- if icon_url and icon_url != 'none' and icon_url != '' -%} + {%- do message_payload.update({'icon_url': icon_url}) -%} + {%- elif icon_emoji and icon_emoji != 'none' -%} + {%- do message_payload.update({'icon_emoji': icon_emoji}) -%} + {%- endif -%} + {%- else -%} + {# Failure message #} + {%- set message_blocks = [ + { + 'type': 'header', + 'text': {'type': 'plain_text', 'text': ':red_circle: ' ~ workflow_name ~ ' - Failed'} + }, + { + 'type': 'section', + 'fields': [ + {'type': 'mrkdwn', 'text': '*Run ID:* ' ~ run_id}, + {'type': 'mrkdwn', 'text': '*Workflow:* ' ~ workflow_name}, + {'type': 'mrkdwn', 'text': '*Failed Jobs:* ' ~ total_failures} + ] + } + ] -%} + + {# Add AI analysis if available #} + {%- if enable_ai_analysis and ai_analysis -%} + {%- do message_blocks.append({ + 'type': 'section', + 'text': { + 'type': 'mrkdwn', + 'text': '*🤖 AI Analysis:*\n' ~ ai_analysis[:2900] + } + }) -%} + {%- endif -%} + + {# Add action button #} + {%- do message_blocks.append({ + 'type': 'actions', + 'elements': [{ + 'type': 'button', + 'text': {'type': 'plain_text', 'text': 'View Workflow'}, + 'url': 'https://github.com/' ~ owner ~ '/' ~ repo ~ '/actions/runs/' ~ run_id, + 'style': 'danger' + }] + }) -%} + + {%- set message_payload = { + 'text': '❌ GitHub Actions Failed: ' ~ repo, + 'attachments': [{ + 'color': '#d63638', + 'blocks': message_blocks + }] + } -%} + + {# Add customization for failure messages at root level #} + {%- if username and username != 'none' -%} + {%- do message_payload.update({'username': username}) -%} + {%- endif -%} + {%- if icon_url and icon_url != 'none' and icon_url != '' -%} + {%- do message_payload.update({'icon_url': icon_url}) -%} + {%- elif icon_emoji and icon_emoji != 'none' -%} + {%- do message_payload.update({'icon_emoji': icon_emoji}) -%} + {%- endif -%} + {%- endif -%} + + {# Send message #} + {%- if use_webhook -%} + {% set send_query %} + SELECT slack_utils.post_webhook('{{ webhook_secret_name }}', PARSE_JSON($${{ message_payload | tojson }}$$)) as result + {% endset %} + {%- else -%} + {% set send_query %} + SELECT slack.post_message('{{ slack_channel }}', PARSE_JSON($${{ message_payload | tojson }}$$), '{{ bot_secret_name }}') as result + {% endset %} + {%- endif -%} + + {%- set result = run_query(send_query) -%} + {{ log("Main message sent successfully", true) }} + + {# Handle threading for failures #} + {%- if enable_auto_threading and total_failures > 0 and not use_webhook and slack_channel -%} + {%- set main_response = fromjson(result.rows[0][0]) -%} + {%- set main_thread_ts = main_response.ts or (main_response.data and
main_response.data.ts) -%} + + {{ log("Starting threading with " ~ failure_metadata|length ~ " jobs", true) }} + + {%- for job_meta in failure_metadata -%} + {%- set job_name = job_meta.job_name -%} + {%- set job_url = job_meta.job_url -%} + {%- set logs_preview = job_meta.logs_preview -%} + + {# Post job summary in thread #} + {%- set job_summary = { + 'text': 'Job Details: ' ~ job_name, + 'attachments': [{ + 'color': '#d63638', + 'blocks': [ + { + 'type': 'section', + 'fields': [ + {'type': 'mrkdwn', 'text': '*Job:* ' ~ job_name}, + {'type': 'mrkdwn', 'text': '*Status:* failure'} + ] + }, + { + 'type': 'actions', + 'elements': [{ + 'type': 'button', + 'text': {'type': 'plain_text', 'text': 'View Job'}, + 'url': job_url, + 'style': 'danger' + }] + } + ] + }] + } -%} + + {# Add customization to thread messages #} + {%- if username and username != 'none' -%} + {%- do job_summary.update({'username': username}) -%} + {%- endif -%} + {%- if icon_url and icon_url != 'none' and icon_url != '' -%} + {%- do job_summary.update({'icon_url': icon_url}) -%} + {%- elif icon_emoji and icon_emoji != 'none' -%} + {%- do job_summary.update({'icon_emoji': icon_emoji}) -%} + {%- endif -%} + + {% set job_thread_query %} + SELECT slack.post_reply('{{ slack_channel }}', '{{ main_thread_ts }}', PARSE_JSON($${{ job_summary | tojson }}$$), '{{ bot_secret_name }}') as result + {% endset %} + + {%- set job_result = run_query(job_thread_query) -%} + + {# Post logs as additional thread replies if available - split long logs #} + {%- if logs_preview and logs_preview != '' -%} + {%- set max_chunk_size = 2900 -%} + {%- set log_chunks = [] -%} + + {# Split logs into chunks #} + {%- for i in range(0, logs_preview|length, max_chunk_size) -%} + {%- set chunk = logs_preview[i:i+max_chunk_size] -%} + {%- do log_chunks.append(chunk) -%} + {%- endfor -%} + + {# Send each chunk as a separate thread message #} + {%- for chunk_idx in range(log_chunks|length) -%} + {%- set chunk = log_chunks[chunk_idx] -%} + {%- set chunk_header = '' -%} + + {# Add chunk header if multiple chunks #} + {%- if log_chunks|length > 1 -%} + {%- set chunk_header = '📋 Logs (' ~ (chunk_idx + 1) ~ '/' ~ log_chunks|length ~ '):\n' -%} + {%- else -%} + {%- set chunk_header = '📋 Logs:\n' -%} + {%- endif -%} + + {%- set log_message = {'text': chunk_header ~ '```\n' ~ chunk ~ '\n```'} -%} + + {# Add customization to log thread messages #} + {%- if username and username != 'none' -%} + {%- do log_message.update({'username': username}) -%} + {%- endif -%} + {%- if icon_url and icon_url != 'none' and icon_url != '' -%} + {%- do log_message.update({'icon_url': icon_url}) -%} + {%- elif icon_emoji and icon_emoji != 'none' -%} + {%- do log_message.update({'icon_emoji': icon_emoji}) -%} + {%- endif -%} + + {% set log_thread_query %} + SELECT slack.post_reply('{{ slack_channel }}', '{{ main_thread_ts }}', PARSE_JSON($${{ log_message | tojson }}$$), '{{ bot_secret_name }}') as result + {% endset %} + + {%- set log_result = run_query(log_thread_query) -%} + {%- endfor -%} + {%- endif -%} + + {{ log("Posted thread for job: " ~ job_name, true) }} + {%- endfor -%} + + {{ log("Threading completed for " ~ failure_metadata|length ~ " jobs", true) }} + {%- else -%} + {{ log("Message sent: " ~ result.rows[0][0] if result.rows else "No response", true) }} + {%- endif -%} + +{% endmacro %} diff --git a/macros/livequery/udf_compatibility.sql b/macros/livequery/udf_compatibility.sql new file mode 100644 index 0000000..15478aa --- /dev/null +++
b/macros/livequery/udf_compatibility.sql @@ -0,0 +1,26 @@ +{% macro check_udf_api_v2_exists() -%} +{# + Check if live.udf_api_v2 function exists at compile time + Returns true/false to control which UDF call to render + + Usage: + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists %} + live.udf_api_v2(...) + {% else %} + live.udf_api(...) + {% endif %} +#} +{% set check_v2_query %} + SELECT COUNT(*) FROM information_schema.functions + WHERE function_name = 'UDF_API_V2' AND function_schema = 'LIVE' +{% endset %} + +{% if execute %} + {% set v2_exists = run_query(check_v2_query).rows[0][0] > 0 %} +{% else %} + {% set v2_exists = false %} +{% endif %} + +{{ return(v2_exists) }} +{%- endmacro -%} \ No newline at end of file diff --git a/macros/marketplace/alchemy/README.md b/macros/marketplace/alchemy/README.md new file mode 100644 index 0000000..cf61ace --- /dev/null +++ b/macros/marketplace/alchemy/README.md @@ -0,0 +1,288 @@ +# Alchemy API Integration + +Comprehensive blockchain data integration using Alchemy's powerful APIs for NFTs, tokens, transfers, and RPC calls across multiple networks. + +## Supported Networks + +- **Ethereum** (`eth-mainnet`) +- **Polygon** (`polygon-mainnet`) +- **Arbitrum** (`arb-mainnet`) +- **Optimism** (`opt-mainnet`) +- **Base** (`base-mainnet`) +- **And more** - Check [Alchemy's documentation](https://docs.alchemy.com/reference/api-overview) for the latest supported networks + +## Setup + +1. Get your Alchemy API key from [Alchemy Dashboard](https://dashboard.alchemy.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/ALCHEMY` + +3. Deploy the Alchemy marketplace functions: + ```bash + dbt run --models alchemy__ alchemy_utils__alchemy_utils + ``` + +## Core Functions + +### Utility Functions (`alchemy_utils` schema) + +#### `alchemy_utils.nfts_get(network, path, query_args)` +Make GET requests to Alchemy NFT API endpoints. + +#### `alchemy_utils.nfts_post(network, path, body)` +Make POST requests to Alchemy NFT API endpoints. + +#### `alchemy_utils.rpc(network, method, params)` +Make RPC calls to blockchain networks via Alchemy. + +### NFT Functions (`alchemy` schema) + +#### `alchemy.get_nfts_for_owner(network, owner[, query_args])` +Get all NFTs owned by an address. + +#### `alchemy.get_nft_metadata(network, contract_address, token_id)` +Get metadata for a specific NFT. + +#### `alchemy.get_nfts_for_collection(network, contract_address[, query_args])` +Get all NFTs in a collection. + +#### `alchemy.get_owners_for_nft(network, contract_address, token_id)` +Get all owners of a specific NFT. + +### Token Functions + +#### `alchemy.get_token_balances(network, owner[, contract_addresses])` +Get token balances for an address. + +#### `alchemy.get_token_metadata(network, contract_address)` +Get metadata for a token contract. + +### Transfer Functions + +#### `alchemy.get_asset_transfers(network, query_args)` +Get asset transfer data with flexible filtering. 
+ +## Examples + +### NFT Queries + +#### Get NFTs for Owner +```sql +-- Get all NFTs owned by an address +SELECT alchemy.get_nfts_for_owner( + 'eth-mainnet', + '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b' +); + +-- With pagination and filtering +SELECT alchemy.get_nfts_for_owner( + 'eth-mainnet', + '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b', + { + 'pageSize': 100, + 'contractAddresses': ['0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D'] -- BAYC + } +); +``` + +#### Get NFT Metadata +```sql +-- Get metadata for specific NFT +SELECT alchemy.get_nft_metadata( + 'eth-mainnet', + '0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D', -- BAYC contract + '1234' -- Token ID +); +``` + +#### Get Collection NFTs +```sql +-- Get all NFTs in a collection +SELECT alchemy.get_nfts_for_collection( + 'eth-mainnet', + '0x60E4d786628Fea6478F785A6d7e704777c86a7c6', -- MAYC + { + 'pageSize': 50, + 'startToken': '0' + } +); +``` + +### Token Queries + +#### Get Token Balances +```sql +-- Get all token balances for an address +SELECT alchemy.get_token_balances( + 'eth-mainnet', + '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b' +); + +-- Get specific token balances +SELECT alchemy.get_token_balances( + 'eth-mainnet', + '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b', + ['0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48'] -- USDC +); +``` + +#### Get Token Metadata +```sql +-- Get token contract information +SELECT alchemy.get_token_metadata( + 'eth-mainnet', + '0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48' -- USDC +); +``` + +### Transfer Analysis + +#### Asset Transfers +```sql +-- Get recent transfers for an address +SELECT alchemy.get_asset_transfers( + 'eth-mainnet', + { + 'fromAddress': '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b', + 'category': ['erc721', 'erc1155'], + 'maxCount': 100 + } +); + +-- Get transfers for a specific contract +SELECT alchemy.get_asset_transfers( + 'eth-mainnet', + { + 'contractAddresses': ['0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D'], + 'category': ['erc721'], + 'fromBlock': '0x12A05F200', + 'toBlock': 'latest' + } +); +``` + +### RPC Calls + +#### Direct Blockchain Queries +```sql +-- Get latest block number +SELECT alchemy_utils.rpc( + 'eth-mainnet', + 'eth_blockNumber', + [] +); + +-- Get block by number +SELECT alchemy_utils.rpc( + 'eth-mainnet', + 'eth_getBlockByNumber', + ['0x12A05F200', true] +); + +-- Get transaction receipt +SELECT alchemy_utils.rpc( + 'eth-mainnet', + 'eth_getTransactionReceipt', + ['0x1234567890abcdef...'] +); +``` + +### Multi-Network Analysis + +#### Compare NFT Holdings Across Networks +```sql +-- Get NFT holdings on Ethereum +WITH eth_nfts AS ( + SELECT 'ethereum' as network, alchemy.get_nfts_for_owner( + 'eth-mainnet', + '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b' + ) as nfts +), +-- Get NFTs on Polygon +polygon_nfts AS ( + SELECT 'polygon' as network, alchemy.get_nfts_for_owner( + 'polygon-mainnet', + '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b' + ) as nfts +) +SELECT network, nfts:totalCount::INTEGER as nft_count +FROM eth_nfts +UNION ALL +SELECT network, nfts:totalCount::INTEGER +FROM polygon_nfts; +``` + +### Advanced Analytics + +#### NFT Floor Price Tracking +```sql +-- Track collection stats over time +WITH collection_data AS ( + SELECT alchemy.get_nfts_for_collection( + 'eth-mainnet', + '0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D', -- BAYC + {'pageSize': 1} + ) as collection_info +) +SELECT + collection_info:contract:name::STRING as collection_name, + collection_info:contract:totalSupply::INTEGER as total_supply, + CURRENT_TIMESTAMP as snapshot_time +FROM
collection_data; +``` + +## Error Handling + +Handle API errors and rate limits: + +```sql +WITH api_response AS ( + SELECT alchemy.get_nfts_for_owner( + 'eth-mainnet', + '0xinvalid-address' + ) as response +) +SELECT + CASE + WHEN response:error IS NOT NULL THEN + CONCAT('API Error: ', response:error:message::STRING) + WHEN response:ownedNfts IS NOT NULL THEN + CONCAT('Success: Found ', ARRAY_SIZE(response:ownedNfts), ' NFTs') + ELSE + 'Unexpected response format' + END as result +FROM api_response; +``` + +## Rate Limiting + +Alchemy API has the following rate limits: +- **Free tier**: 300 requests per second +- **Growth tier**: 660 requests per second +- **Scale tier**: Custom limits + +The functions automatically handle rate limiting through Livequery's retry mechanisms. + +## Best Practices + +1. **Use pagination**: For large datasets, use `pageSize` and pagination tokens +2. **Filter requests**: Use `contractAddresses` to limit scope when possible +3. **Cache results**: Store frequently accessed data in tables +4. **Monitor usage**: Track API calls to stay within limits +5. **Network selection**: Choose the most relevant network for your use case + +## Supported Categories + +For asset transfers, use these categories: +- `erc20` - ERC-20 token transfers +- `erc721` - NFT transfers +- `erc1155` - Multi-token standard transfers +- `internal` - Internal ETH transfers +- `external` - External ETH transfers + +## API Documentation + +- [Alchemy API Reference](https://docs.alchemy.com/reference/api-overview) +- [NFT API](https://docs.alchemy.com/reference/nft-api-quickstart) +- [Token API](https://docs.alchemy.com/reference/token-api-quickstart) +- [Enhanced API Methods](https://docs.alchemy.com/reference/enhanced-api-quickstart) \ No newline at end of file diff --git a/macros/marketplace/alchemy/util_udfs.yaml.sql b/macros/marketplace/alchemy/util_udfs.yaml.sql index 384e2c0..95a77c7 100644 --- a/macros/marketplace/alchemy/util_udfs.yaml.sql +++ b/macros/marketplace/alchemy/util_udfs.yaml.sql @@ -3,7 +3,7 @@ This macro is used to generate the alchemy base endpoints #} -- name: {{ schema -}}.nfts_get +- name: {{ schema_name }}.nfts_get signature: - [NETWORK, STRING, The blockchain/network] - [PATH, STRING, The path starting with '/'] @@ -13,16 +13,35 @@ options: | COMMENT = $$Used to issue a 'GET' request to the Alchemy NFT API.$$ sql: | - SELECT - live.udf_api( + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'GET', concat( 'https://', NETWORK,'.g.alchemy.com/nft/v2/{',NETWORK,'}', PATH, '?', utils.udf_object_to_url_query_string(QUERY_ARGS) ), + {'fsc-quantum-execution-mode': 'async'}, + {}, + '_FSC_SYS/ALCHEMY', + TRUE + ) + {%- else -%} + live.udf_api( + 'GET', + concat( + 'https://', NETWORK,'.g.alchemy.com/nft/v2/{',NETWORK,'}', PATH, '?', + utils.udf_object_to_url_query_string(QUERY_ARGS) + ), + {}, + {}, '_FSC_SYS/ALCHEMY' - ) as response + ) + {%- endif %} + as response -- name: {{ schema -}}.nfts_get +- name: {{ schema_name }}.nfts_get signature: - [NETWORK, STRING, The blockchain/network] - [VERSION, STRING, The version of the API to use] @@ -33,16 +52,35 @@ options: | COMMENT = $$Used to issue a 'GET' request to the Alchemy NFT API.$$ sql: | - SELECT - live.udf_api( + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'GET', concat( 'https://', NETWORK,'.g.alchemy.com/nft/', VERSION, '/{',NETWORK,'}', PATH, '?', utils.udf_object_to_url_query_string(QUERY_ARGS) ), + 
{'fsc-quantum-execution-mode': 'async'}, + {}, + '_FSC_SYS/ALCHEMY', + TRUE + ) + {%- else -%} + live.udf_api( + 'GET', + concat( + 'https://', NETWORK,'.g.alchemy.com/nft/', VERSION, '/{',NETWORK,'}', PATH, '?', + utils.udf_object_to_url_query_string(QUERY_ARGS) + ), + {}, + {}, '_FSC_SYS/ALCHEMY' - ) as response + ) + {%- endif %} + as response -- name: {{ schema -}}.nfts_post +- name: {{ schema_name }}.nfts_post signature: - [NETWORK, STRING, The blockchain/network] - [PATH, STRING, The path starting with '/'] @@ -52,16 +90,29 @@ options: | COMMENT = $$Used to issue a 'POST' request to the Alchemy NFT API.$$ sql: | - SELECT + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'POST', + concat('https://', NETWORK,'.g.alchemy.com/nft/v2/{',NETWORK,'}', PATH), + {'fsc-quantum-execution-mode': 'async'}, + BODY, + '_FSC_SYS/ALCHEMY', + TRUE + ) + {%- else -%} live.udf_api( 'POST', concat('https://', NETWORK,'.g.alchemy.com/nft/v2/{',NETWORK,'}', PATH), {}, BODY, '_FSC_SYS/ALCHEMY' - ) as response + ) + {%- endif %} + as response -- name: {{ schema -}}.rpc +- name: {{ schema_name }}.rpc signature: - [NETWORK, STRING, The blockchain/network] - [METHOD, STRING, The RPC method to call] @@ -71,10 +122,25 @@ options: | COMMENT = $$Used to issue an RPC call to Alchemy.$$ sql: | - SELECT live.udf_api( - 'POST', - concat('https://', NETWORK,'.g.alchemy.com/v2/{',NETWORK,'}'), - {}, - {'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS}, - '_FSC_SYS/ALCHEMY') as response + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'POST', + concat('https://', NETWORK,'.g.alchemy.com/v2/{',NETWORK,'}'), + {'fsc-quantum-execution-mode': 'async'}, + {'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS}, + '_FSC_SYS/ALCHEMY', + TRUE + ) + {%- else -%} + live.udf_api( + 'POST', + concat('https://', NETWORK,'.g.alchemy.com/v2/{',NETWORK,'}'), + {}, + {'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS}, + '_FSC_SYS/ALCHEMY' + ) + {%- endif %} + as response {% endmacro %} diff --git a/macros/marketplace/allday/README.md b/macros/marketplace/allday/README.md new file mode 100644 index 0000000..2fd8594 --- /dev/null +++ b/macros/marketplace/allday/README.md @@ -0,0 +1,36 @@ +# NBA All Day API Integration + +NBA All Day is Dapper Labs' basketball NFT platform, offering officially licensed NBA Moments as digital collectibles. + +## Setup + +1. Get your NBA All Day API key from [Dapper Labs developer portal](https://developers.dapperlabs.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/ALLDAY` + +3. Deploy the All Day marketplace functions: + ```bash + dbt run --models allday__ allday_utils__allday_utils + ``` + +## Functions + +### `allday.get(path, query_args)` +Make GET requests to NBA All Day API endpoints. 
+ +## Examples + +```sql +-- Get NBA All Day collections +SELECT allday.get('/collections', {}); + +-- Get specific moment details +SELECT allday.get('/moments/12345', {}); + +-- Search for moments by player +SELECT allday.get('/moments', {'player_id': 'lebron-james'}); +``` + +## API Documentation + +- [NBA All Day API Documentation](https://developers.dapperlabs.com/) \ No newline at end of file diff --git a/macros/marketplace/apilayer/README.md b/macros/marketplace/apilayer/README.md new file mode 100644 index 0000000..b8d4985 --- /dev/null +++ b/macros/marketplace/apilayer/README.md @@ -0,0 +1,39 @@ +# API Layer Integration + +API Layer provides a comprehensive suite of APIs including currency conversion, geolocation, weather data, and more utility APIs. + +## Setup + +1. Get your API Layer API key from [API Layer Dashboard](https://apilayer.com/dashboard) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/APILAYER` + +3. Deploy the API Layer marketplace functions: + ```bash + dbt run --models apilayer__ apilayer_utils__apilayer_utils + ``` + +## Functions + +### `apilayer.get(path, query_args)` +Make GET requests to API Layer API endpoints. + +### `apilayer.post(path, body)` +Make POST requests to API Layer API endpoints. + +## Examples + +```sql +-- Get currency exchange rates +SELECT apilayer.get('/exchangerates_data/latest', {'base': 'USD', 'symbols': 'EUR,GBP,JPY'}); + +-- Get IP geolocation data +SELECT apilayer.get('/ip_api/check', {'ip': '8.8.8.8'}); + +-- Validate email address +SELECT apilayer.get('/email_validation/check', {'email': 'test@example.com'}); +``` + +## API Documentation + +- [API Layer Documentation](https://apilayer.com/marketplace) \ No newline at end of file diff --git a/macros/marketplace/binance/README.md b/macros/marketplace/binance/README.md new file mode 100644 index 0000000..79dd8a1 --- /dev/null +++ b/macros/marketplace/binance/README.md @@ -0,0 +1,39 @@ +# Binance API Integration + +Binance is the world's largest cryptocurrency exchange by trading volume, providing access to spot trading, futures, and market data. + +## Setup + +1. Get your Binance API key from [Binance API Management](https://www.binance.com/en/my/settings/api-management) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/BINANCE` + +3. Deploy the Binance marketplace functions: + ```bash + dbt run --models binance__ binance_utils__binance_utils + ``` + +## Functions + +### `binance.get(path, query_args)` +Make GET requests to Binance API endpoints. + +### `binance.post(path, body)` +Make POST requests to Binance API endpoints. + +## Examples + +```sql +-- Get current Bitcoin price +SELECT binance.get('/api/v3/ticker/price', {'symbol': 'BTCUSDT'}); + +-- Get 24hr ticker statistics +SELECT binance.get('/api/v3/ticker/24hr', {'symbol': 'ETHUSDT'}); + +-- Get order book depth +SELECT binance.get('/api/v3/depth', {'symbol': 'ADAUSDT', 'limit': 100}); +``` + +## API Documentation + +- [Binance API Documentation](https://binance-docs.github.io/apidocs/spot/en/) \ No newline at end of file diff --git a/macros/marketplace/bitquery/README.md b/macros/marketplace/bitquery/README.md new file mode 100644 index 0000000..2389851 --- /dev/null +++ b/macros/marketplace/bitquery/README.md @@ -0,0 +1,45 @@ +# Bitquery API Integration + +Bitquery provides GraphQL APIs for blockchain data across multiple networks including Bitcoin, Ethereum, Binance Smart Chain, and many others. + +## Setup + +1. Get your Bitquery API key from [Bitquery IDE](https://ide.bitquery.io/) + +2. 
Store the API key in Snowflake secrets under `_FSC_SYS/BITQUERY` + +3. Deploy the Bitquery marketplace functions: + ```bash + dbt run --models bitquery__ bitquery_utils__bitquery_utils + ``` + +## Functions + +### `bitquery.get(path, query_args)` +Make GET requests to Bitquery API endpoints. + +### `bitquery.post(path, body)` +Make POST requests to Bitquery API endpoints for GraphQL queries. + +## Examples + +```sql +-- Get Ethereum DEX trades +SELECT bitquery.post('/graphql', { + 'query': 'query { ethereum { dexTrades(date: {since: "2023-01-01"}) { count } } }' +}); + +-- Get Bitcoin transactions +SELECT bitquery.post('/graphql', { + 'query': 'query { bitcoin { transactions(date: {since: "2023-01-01"}) { count } } }' +}); + +-- Get token transfers on BSC +SELECT bitquery.post('/graphql', { + 'query': 'query { ethereum(network: bsc) { transfers(date: {since: "2023-01-01"}) { count } } }' +}); +``` + +## API Documentation + +- [Bitquery API Documentation](https://docs.bitquery.io/) \ No newline at end of file diff --git a/macros/marketplace/blockpour/README.md b/macros/marketplace/blockpour/README.md new file mode 100644 index 0000000..7c3dec6 --- /dev/null +++ b/macros/marketplace/blockpour/README.md @@ -0,0 +1,39 @@ +# Blockpour API Integration + +Blockpour provides blockchain infrastructure and data services with high-performance APIs for accessing on-chain data. + +## Setup + +1. Get your Blockpour API key from [Blockpour Dashboard](https://blockpour.com/dashboard) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/BLOCKPOUR` + +3. Deploy the Blockpour marketplace functions: + ```bash + dbt run --models blockpour__ blockpour_utils__blockpour_utils + ``` + +## Functions + +### `blockpour.get(path, query_args)` +Make GET requests to Blockpour API endpoints. + +### `blockpour.post(path, body)` +Make POST requests to Blockpour API endpoints. + +## Examples + +```sql +-- Get latest block information +SELECT blockpour.get('/api/v1/blocks/latest', {}); + +-- Get transaction details +SELECT blockpour.get('/api/v1/transactions/0x...', {}); + +-- Get token balances for an address +SELECT blockpour.get('/api/v1/addresses/0x.../tokens', {}); +``` + +## API Documentation + +- [Blockpour API Documentation](https://docs.blockpour.com/) \ No newline at end of file diff --git a/macros/marketplace/chainbase/README.md b/macros/marketplace/chainbase/README.md new file mode 100644 index 0000000..312eae9 --- /dev/null +++ b/macros/marketplace/chainbase/README.md @@ -0,0 +1,39 @@ +# Chainbase API Integration + +Chainbase provides comprehensive blockchain data infrastructure with APIs for accessing multi-chain data, NFTs, and DeFi protocols. + +## Setup + +1. Get your Chainbase API key from [Chainbase Console](https://console.chainbase.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/CHAINBASE` + +3. Deploy the Chainbase marketplace functions: + ```bash + dbt run --models chainbase__ chainbase_utils__chainbase_utils + ``` + +## Functions + +### `chainbase.get(path, query_args)` +Make GET requests to Chainbase API endpoints. + +### `chainbase.post(path, body)` +Make POST requests to Chainbase API endpoints. 
+ +## Examples + +```sql +-- Get token metadata +SELECT chainbase.get('/v1/token/metadata', {'chain_id': 1, 'contract_address': '0x...'}); + +-- Get NFT collections +SELECT chainbase.get('/v1/nft/collections', {'chain_id': 1, 'page': 1, 'limit': 20}); + +-- Get account token balances +SELECT chainbase.get('/v1/account/tokens', {'chain_id': 1, 'address': '0x...', 'limit': 20}); +``` + +## API Documentation + +- [Chainbase API Documentation](https://docs.chainbase.com/) \ No newline at end of file diff --git a/macros/marketplace/chainstack/README.md b/macros/marketplace/chainstack/README.md new file mode 100644 index 0000000..882b5d3 --- /dev/null +++ b/macros/marketplace/chainstack/README.md @@ -0,0 +1,54 @@ +# Chainstack API Integration + +Chainstack provides managed blockchain infrastructure with high-performance nodes and APIs for multiple blockchain networks. + +## Setup + +1. Get your Chainstack API key from [Chainstack Console](https://console.chainstack.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/CHAINSTACK` + +3. Deploy the Chainstack marketplace functions: + ```bash + dbt run --models chainstack__ chainstack_utils__chainstack_utils + ``` + +## Functions + +### `chainstack.get(path, query_args)` +Make GET requests to Chainstack API endpoints. + +### `chainstack.post(path, body)` +Make POST requests to Chainstack API endpoints. + +## Examples + +```sql +-- Get latest block number +SELECT chainstack.post('/rpc', { + 'jsonrpc': '2.0', + 'method': 'eth_blockNumber', + 'params': [], + 'id': 1 +}); + +-- Get account balance +SELECT chainstack.post('/rpc', { + 'jsonrpc': '2.0', + 'method': 'eth_getBalance', + 'params': ['0x...', 'latest'], + 'id': 1 +}); + +-- Get transaction receipt +SELECT chainstack.post('/rpc', { + 'jsonrpc': '2.0', + 'method': 'eth_getTransactionReceipt', + 'params': ['0x...'], + 'id': 1 +}); +``` + +## API Documentation + +- [Chainstack API Documentation](https://docs.chainstack.com/) \ No newline at end of file diff --git a/macros/marketplace/claude/README.md b/macros/marketplace/claude/README.md new file mode 100644 index 0000000..20e560e --- /dev/null +++ b/macros/marketplace/claude/README.md @@ -0,0 +1,179 @@ +# Claude API Integration + +Anthropic's Claude AI integration for sophisticated text analysis, content generation, and reasoning tasks. This integration provides access to Claude's advanced language models through Snowflake UDFs. + +## Available Models + +- **Claude 3.5 Sonnet**: Latest and most capable model for complex tasks +- **Claude 3 Opus**: Powerful model for demanding use cases +- **Claude 3 Sonnet**: Balanced performance and speed +- **Claude 3 Haiku**: Fast and efficient for simple tasks + +Check [Anthropic's documentation](https://docs.anthropic.com/claude/docs/models-overview) for the latest available models. + +## Setup + +1. Get your Claude API key from [Anthropic Console](https://console.anthropic.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/CLAUDE` + +3. Deploy the Claude marketplace functions: + ```bash + dbt run --models claude__ claude_utils__claude_utils + ``` + +## Functions + +### `claude_utils.post(path, body)` +Make POST requests to Claude API endpoints. + +### `claude_utils.get(path)` +Make GET requests to Claude API endpoints. + +### `claude_utils.delete_method(path)` +Make DELETE requests to Claude API endpoints. + +### `claude.chat_completions(messages[, model, max_tokens, temperature])` +Send messages to Claude for chat completion. 
+ +### `claude.extract_response_text(claude_response)` +Extract text content from Claude API responses. + +## Examples + +### Basic Chat +```sql +-- Simple conversation with Claude +SELECT claude.chat_completions([ + {'role': 'user', 'content': 'Explain quantum computing in simple terms'} +]); +``` + +### Chat with System Prompt +```sql +-- Chat with system message and conversation history +SELECT claude.chat_completions([ + {'role': 'system', 'content': 'You are a helpful data analyst.'}, + {'role': 'user', 'content': 'How do I optimize this SQL query?'}, + {'role': 'assistant', 'content': 'I can help you optimize your SQL query...'}, + {'role': 'user', 'content': 'SELECT * FROM large_table WHERE date > "2023-01-01"'} +]); +``` + +### Text Analysis +```sql +-- Analyze text sentiment and themes +SELECT claude.chat_completions([ + {'role': 'user', 'content': 'Analyze the sentiment and key themes in this customer feedback: "The product is okay but customer service was terrible. Took forever to get help."'} +]); +``` + +### Code Generation +```sql +-- Generate Python code +SELECT claude.chat_completions([ + {'role': 'user', 'content': 'Write a Python function to calculate the moving average of a list of numbers'} +]); +``` + +### Extract Response Text +```sql +-- Get just the text content from Claude's response +WITH claude_response AS ( + SELECT claude.chat_completions([ + {'role': 'user', 'content': 'What is machine learning?'} + ]) as response +) +SELECT claude.extract_response_text(response) as answer +FROM claude_response; +``` + +### Batch Text Processing +```sql +-- Process multiple texts +WITH texts AS ( + SELECT * FROM VALUES + ('Great product, highly recommend!'), + ('Terrible experience, would not buy again'), + ('Average quality, nothing special') + AS t(feedback) +) +SELECT + feedback, + claude.extract_response_text( + claude.chat_completions([ + {'role': 'user', 'content': CONCAT('Analyze sentiment (positive/negative/neutral): ', feedback)} + ]) + ) as sentiment +FROM texts; +``` + +### Different Models +```sql +-- Use specific Claude model +SELECT claude.chat_completions( + [{'role': 'user', 'content': 'Write a complex analysis of market trends'}], + 'claude-3-opus-20240229', -- Use Opus for complex reasoning + 2000, -- max_tokens + 0.3 -- temperature +); +``` + +## Integration with GitHub Actions + +This Claude integration is used by the GitHub Actions failure analysis system: + +```sql +-- Analyze GitHub Actions failures with Claude +SELECT claude.extract_response_text( + claude.chat_completions([ + {'role': 'user', 'content': CONCAT( + 'Analyze this CI/CD failure and provide root cause analysis: ', + error_logs + )} + ]) +) as ai_analysis +FROM github_failures; +``` + +## Error Handling + +Check for errors in Claude responses: + +```sql +WITH response AS ( + SELECT claude.chat_completions([ + {'role': 'user', 'content': 'Hello Claude'} + ]) as result +) +SELECT + CASE + WHEN result:error IS NOT NULL THEN result:error:message::STRING + ELSE claude.extract_response_text(result) + END as final_response +FROM response; +``` + +## Best Practices + +1. **Use appropriate models**: Haiku for simple tasks, Opus for complex reasoning +2. **Set token limits**: Control costs with reasonable `max_tokens` values +3. **Temperature control**: Lower values (0.1-0.3) for factual tasks, higher (0.7-1.0) for creative tasks +4. **Context management**: Include relevant conversation history for better responses +5. 
**Error handling**: Always check for API errors in responses + +## Rate Limiting + +Claude API has usage limits based on your plan. The functions automatically handle rate limiting through Livequery's retry mechanisms. + +## Security + +- API keys are securely stored in Snowflake secrets +- All communication uses HTTPS encryption +- No sensitive data is logged or cached + +## API Documentation + +- [Claude API Reference](https://docs.anthropic.com/claude/reference/getting-started-with-the-api) +- [Model Comparison](https://docs.anthropic.com/claude/docs/models-overview) +- [Usage Guidelines](https://docs.anthropic.com/claude/docs/use-case-guides) \ No newline at end of file diff --git a/macros/marketplace/claude/messages_batch_udfs.yaml.sql b/macros/marketplace/claude/messages_batch_udfs.yaml.sql index b3cbfd2..fe54b77 100644 --- a/macros/marketplace/claude/messages_batch_udfs.yaml.sql +++ b/macros/marketplace/claude/messages_batch_udfs.yaml.sql @@ -12,7 +12,7 @@ options: | COMMENT = $$Send a batch of messages to Claude and get responses [API docs: Messages Batch](https://docs.anthropic.com/en/api/creating-message-batches)$$ sql: | - SELECT claude_utils.post( + SELECT claude_utils.post_api( '/v1/messages/batches', MESSAGES ) as response @@ -26,7 +26,7 @@ options: | COMMENT = $$Retrieve details of a specific Message Batch [API docs: Retrieve Message Batch](https://docs.anthropic.com/en/api/retrieving-message-batches)$$ sql: | - SELECT claude_utils.get( + SELECT claude_utils.get_api( CONCAT('/v1/messages/batches/', MESSAGE_BATCH_ID) ) as response @@ -38,7 +38,7 @@ options: | COMMENT = $$Retrieve results of a Message Batch [API docs: Retrieve Message Batch Results](https://docs.anthropic.com/en/api/retrieving-message-batches)$$ sql: | - SELECT claude_utils.get( + SELECT claude_utils.get_api( CONCAT('/v1/messages/batches/', MESSAGE_BATCH_ID, '/results') ) as response @@ -49,7 +49,7 @@ options: | COMMENT = $$List all Message Batches [API docs: List Message Batches](https://docs.anthropic.com/en/api/retrieving-message-batches)$$ sql: | - SELECT claude_utils.get( + SELECT claude_utils.get_api( '/v1/messages/batches' ) as response @@ -62,7 +62,7 @@ options: | COMMENT = $$List all Message Batches [API docs: List Message Batches](https://docs.anthropic.com/en/api/retrieving-message-batches)$$ sql: | - SELECT claude_utils.get( + SELECT claude_utils.get_api( CONCAT('/v1/messages/batches', '?before_id=', COALESCE(BEFORE_ID, ''), '&limit=', COALESCE(LIMIT::STRING, '') @@ -78,7 +78,7 @@ options: | COMMENT = $$List all Message Batches [API docs: List Message Batches](https://docs.anthropic.com/en/api/retrieving-message-batches)$$ sql: | - SELECT claude_utils.get( + SELECT claude_utils.get_api( CONCAT('/v1/messages/batches', '?after_id=', COALESCE(AFTER_ID, ''), '&limit=', COALESCE(LIMIT::STRING, '') @@ -92,7 +92,7 @@ options: | COMMENT = $$Cancel a Message Batch [API docs: Cancel Message Batch](https://docs.anthropic.com/en/api/retrieving-message-batches)$$ sql: | - SELECT claude_utils.post( + SELECT claude_utils.post_api( CONCAT('/v1/messages/batches/', MESSAGE_BATCH_ID, '/cancel'), {} ) as response diff --git a/macros/marketplace/claude/messages_udfs.yaml.sql b/macros/marketplace/claude/messages_udfs.yaml.sql index 4678a0b..fab9a31 100644 --- a/macros/marketplace/claude/messages_udfs.yaml.sql +++ b/macros/marketplace/claude/messages_udfs.yaml.sql @@ -12,7 +12,7 @@ options: | COMMENT = $$Send a message to Claude and get a response [API docs: 
Messages](https://docs.anthropic.com/claude/reference/messages_post)$$ sql: | - SELECT claude_utils.post( + SELECT claude_utils.post_api( '/v1/messages', { 'model': 'claude-3-5-sonnet-20241022', @@ -31,7 +31,7 @@ options: | COMMENT = $$Send a message to Claude and get a response [API docs: Messages](https://docs.anthropic.com/claude/reference/messages_post)$$ sql: | - SELECT claude_utils.post( + SELECT claude_utils.post_api( '/v1/messages', { 'model': COALESCE(MODEL, 'claude-3-5-sonnet-20241022'), @@ -54,7 +54,7 @@ options: | COMMENT = $$Send a message to Claude and get a response [API docs: Messages](https://docs.anthropic.com/claude/reference/messages_post)$$ sql: | - SELECT claude_utils.post( + SELECT claude_utils.post_api( '/v1/messages', { 'model': MODEL, @@ -76,7 +76,7 @@ options: | COMMENT = $$Count tokens in a message array before sending to Claude [API docs: Count Tokens](https://docs.anthropic.com/claude/reference/counting-tokens)$$ sql: | - SELECT claude_utils.post( + SELECT claude_utils.post_api( '/v1/messages/count_tokens', { 'model': COALESCE(MODEL, 'claude-3-5-sonnet-20241022'), diff --git a/macros/marketplace/claude/models_udfs.yaml.sql b/macros/marketplace/claude/models_udfs.yaml.sql index 023f19b..e589d97 100644 --- a/macros/marketplace/claude/models_udfs.yaml.sql +++ b/macros/marketplace/claude/models_udfs.yaml.sql @@ -11,7 +11,7 @@ options: | COMMENT = $$List available Claude models [API docs: List Models](https://docs.anthropic.com/claude/reference/models_get)$$ sql: | - SELECT claude_utils.get( + SELECT claude_utils.get_api( '/v1/models' ) as response @@ -23,7 +23,7 @@ options: | COMMENT = $$Get details for a specific Claude model [API docs: Get Model](https://docs.anthropic.com/claude/reference/models_retrieve)$$ sql: | - SELECT claude_utils.get( + SELECT claude_utils.get_api( CONCAT('/v1/models/', MODEL) ) as response diff --git a/macros/marketplace/claude/util_udfs.yaml.sql b/macros/marketplace/claude/util_udfs.yaml.sql index ed807ed..189d944 100644 --- a/macros/marketplace/claude/util_udfs.yaml.sql +++ b/macros/marketplace/claude/util_udfs.yaml.sql @@ -2,7 +2,7 @@ {# This macro is used to generate API calls to Claude API endpoints #} -- name: {{ schema_name -}}.post +- name: {{ schema_name -}}.post_api signature: - [PATH, STRING, The API endpoint path] - [BODY, OBJECT, The request body] @@ -11,7 +11,10 @@ options: | COMMENT = $$Make calls to Claude API [API docs: Claude](https://docs.anthropic.com/claude/reference/getting-started-with-the-api)$$ sql: | - SELECT live.udf_api( + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( 'POST', CONCAT('https://api.anthropic.com', PATH), { @@ -20,10 +23,31 @@ 'content-type': 'application/json' }, BODY, - '_FSC_SYS/CLAUDE' - ) as response + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/CLAUDE', + 'Vault/prod/data_platform/claude' + ), + TRUE + ) + {%- else -%} + live.udf_api( + 'POST', + CONCAT('https://api.anthropic.com', PATH), + { + 'anthropic-version': '2023-06-01', + 'x-api-key': '{API_KEY}', + 'content-type': 'application/json' + }, + BODY, + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/CLAUDE', + 'Vault/prod/data_platform/claude' + ) + ) + {%- endif %} + as response -- name: {{ schema_name -}}.get +- name: {{ schema_name -}}.get_api signature: - [PATH, STRING, The API endpoint path] return_type: @@ -31,7 +55,10 @@ options: | COMMENT = $$Make GET requests to Claude API [API docs: Get](https://docs.anthropic.com/claude/reference/get)$$ sql: | - SELECT 
live.udf_api( + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( 'GET', CONCAT('https://api.anthropic.com', PATH), { @@ -40,8 +67,29 @@ 'content-type': 'application/json' }, NULL, - '_FSC_SYS/CLAUDE' - ) as response + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/CLAUDE', + 'Vault/prod/data_platform/claude' + ), + TRUE + ) + {%- else -%} + live.udf_api( + 'GET', + CONCAT('https://api.anthropic.com', PATH), + { + 'anthropic-version': '2023-06-01', + 'x-api-key': '{API_KEY}', + 'content-type': 'application/json' + }, + NULL, + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/CLAUDE', + 'Vault/prod/data_platform/claude' + ) + ) + {%- endif %} + as response - name: {{ schema_name -}}.delete_method signature: @@ -51,7 +99,10 @@ options: | COMMENT = $$Make DELETE requests to Claude API [API docs: Delete](https://docs.anthropic.com/claude/reference/delete)$$ sql: | - SELECT live.udf_api( + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( 'DELETE', CONCAT('https://api.anthropic.com', PATH), { @@ -60,6 +111,27 @@ 'content-type': 'application/json' }, NULL, - '_FSC_SYS/CLAUDE' - ) as response + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/CLAUDE', + 'Vault/prod/data_platform/claude' + ), + TRUE + ) + {%- else -%} + live.udf_api( + 'DELETE', + CONCAT('https://api.anthropic.com', PATH), + { + 'anthropic-version': '2023-06-01', + 'x-api-key': '{API_KEY}', + 'content-type': 'application/json' + }, + NULL, + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/CLAUDE', + 'Vault/prod/data_platform/claude' + ) + ) + {%- endif %} + as response {% endmacro %} diff --git a/macros/marketplace/cmc/README.md b/macros/marketplace/cmc/README.md new file mode 100644 index 0000000..c2cd13e --- /dev/null +++ b/macros/marketplace/cmc/README.md @@ -0,0 +1,36 @@ +# CoinMarketCap API Integration + +CoinMarketCap is a leading cryptocurrency market data platform providing real-time and historical cryptocurrency prices, market capitalizations, and trading volumes. + +## Setup + +1. Get your CoinMarketCap API key from [CoinMarketCap Pro API](https://pro.coinmarketcap.com/account) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/CMC` + +3. Deploy the CoinMarketCap marketplace functions: + ```bash + dbt run --models cmc__ cmc_utils__cmc_utils + ``` + +## Functions + +### `cmc.get(path, query_args)` +Make GET requests to CoinMarketCap API endpoints. + +## Examples + +```sql +-- Get latest cryptocurrency listings +SELECT cmc.get('/v1/cryptocurrency/listings/latest', {'limit': 100}); + +-- Get specific cryptocurrency quotes +SELECT cmc.get('/v2/cryptocurrency/quotes/latest', {'symbol': 'BTC,ETH,ADA'}); + +-- Get cryptocurrency metadata +SELECT cmc.get('/v2/cryptocurrency/info', {'symbol': 'BTC'}); +``` + +## API Documentation + +- [CoinMarketCap API Documentation](https://coinmarketcap.com/api/documentation/v1/) \ No newline at end of file diff --git a/macros/marketplace/coingecko/README.md b/macros/marketplace/coingecko/README.md new file mode 100644 index 0000000..52350a6 --- /dev/null +++ b/macros/marketplace/coingecko/README.md @@ -0,0 +1,76 @@ +# CoinGecko API Integration + +Comprehensive cryptocurrency market data integration using CoinGecko's Pro API for prices, market data, and trading information. + +## Setup + +1. Get your CoinGecko Pro API key from [CoinGecko Pro](https://pro.coingecko.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/COINGECKO` + +3. 
Deploy the CoinGecko marketplace functions: + ```bash + dbt run --models coingecko__ coingecko_utils__coingecko_utils + ``` + +## Functions + +### `coingecko.get(path, query_args)` +Make GET requests to CoinGecko Pro API endpoints. + +### `coingecko.post(path, body)` +Make POST requests to CoinGecko Pro API endpoints. + +## Examples + +### Price Data +```sql +-- Get current price for Bitcoin +SELECT coingecko.get('/api/v3/simple/price', { + 'ids': 'bitcoin', + 'vs_currencies': 'usd,eth', + 'include_24hr_change': 'true' +}); + +-- Get historical prices +SELECT coingecko.get('/api/v3/coins/bitcoin/history', { + 'date': '30-12-2023' +}); +``` + +### Market Data +```sql +-- Get top cryptocurrencies by market cap +SELECT coingecko.get('/api/v3/coins/markets', { + 'vs_currency': 'usd', + 'order': 'market_cap_desc', + 'per_page': 100, + 'page': 1 +}); + +-- Get global cryptocurrency statistics +SELECT coingecko.get('/api/v3/global', {}); +``` + +### Token Information +```sql +-- Get detailed coin information +SELECT coingecko.get('/api/v3/coins/ethereum', { + 'localization': 'false', + 'tickers': 'false', + 'market_data': 'true', + 'community_data': 'true' +}); +``` + +## Rate Limiting + +CoinGecko Pro API limits: +- **Basic**: 10,000 calls/month +- **Premium**: 50,000 calls/month +- **Enterprise**: Custom limits + +## API Documentation + +- [CoinGecko Pro API Documentation](https://apiguide.coingecko.com/getting-started/introduction) +- [API Endpoints Reference](https://docs.coingecko.com/reference/introduction) \ No newline at end of file diff --git a/macros/marketplace/covalent/README.md b/macros/marketplace/covalent/README.md new file mode 100644 index 0000000..2a22546 --- /dev/null +++ b/macros/marketplace/covalent/README.md @@ -0,0 +1,36 @@ +# Covalent API Integration + +Covalent provides a unified API to access rich blockchain data across multiple networks, offering historical and real-time data for wallets, transactions, and DeFi protocols. + +## Setup + +1. Get your Covalent API key from [Covalent Dashboard](https://www.covalenthq.com/platform/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/COVALENT` + +3. Deploy the Covalent marketplace functions: + ```bash + dbt run --models covalent__ covalent_utils__covalent_utils + ``` + +## Functions + +### `covalent.get(path, query_args)` +Make GET requests to Covalent API endpoints. + +## Examples + +```sql +-- Get token balances for an address +SELECT covalent.get('/v1/1/address/0x.../balances_v2/', {}); + +-- Get transaction history for an address +SELECT covalent.get('/v1/1/address/0x.../transactions_v2/', {'page-size': 100}); + +-- Get NFTs owned by an address +SELECT covalent.get('/v1/1/address/0x.../balances_nft/', {}); +``` + +## API Documentation + +- [Covalent API Documentation](https://www.covalenthq.com/docs/api/) \ No newline at end of file diff --git a/macros/marketplace/credmark/README.md b/macros/marketplace/credmark/README.md new file mode 100644 index 0000000..007ab0e --- /dev/null +++ b/macros/marketplace/credmark/README.md @@ -0,0 +1,39 @@ +# Credmark API Integration + +Credmark provides DeFi risk modeling and analytics APIs with comprehensive data on lending protocols, token prices, and risk metrics. + +## Setup + +1. Get your Credmark API key from [Credmark Portal](https://gateway.credmark.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/CREDMARK` + +3. 
Deploy the Credmark marketplace functions:
+   ```bash
+   dbt run --models credmark__ credmark_utils__credmark_utils
+   ```
+
+## Functions
+
+### `credmark.get(path, query_args)`
+Make GET requests to Credmark API endpoints.
+
+### `credmark.post(path, body)`
+Make POST requests to Credmark API endpoints.
+
+## Examples
+
+```sql
+-- Get token price
+SELECT credmark.get('/v1/model/token.price', {'token_address': '0x...', 'block_number': 'latest'});
+
+-- Get portfolio risk metrics
+SELECT credmark.post('/v1/model/finance.var-portfolio', {'addresses': ['0x...'], 'window': 30});
+
+-- Get lending pool information
+SELECT credmark.get('/v1/model/compound-v2.pool-info', {'token_address': '0x...'});
+```
+
+## API Documentation
+
+- [Credmark API Documentation](https://docs.credmark.com/)
\ No newline at end of file
diff --git a/macros/marketplace/dapplooker/README.md b/macros/marketplace/dapplooker/README.md
new file mode 100644
index 0000000..8ff58fe
--- /dev/null
+++ b/macros/marketplace/dapplooker/README.md
@@ -0,0 +1,39 @@
+# DappLooker API Integration
+
+DappLooker provides a blockchain analytics and data visualization platform with APIs for accessing DeFi, NFT, and on-chain metrics across multiple networks.
+
+## Setup
+
+1. Get your DappLooker API key from [DappLooker Dashboard](https://dapplooker.com/dashboard)
+
+2. Store the API key in Snowflake secrets under `_FSC_SYS/DAPPLOOKER`
+
+3. Deploy the DappLooker marketplace functions:
+   ```bash
+   dbt run --models dapplooker__ dapplooker_utils__dapplooker_utils
+   ```
+
+## Functions
+
+### `dapplooker.get(path, query_args)`
+Make GET requests to DappLooker API endpoints.
+
+### `dapplooker.post(path, body)`
+Make POST requests to DappLooker API endpoints.
+
+## Examples
+
+```sql
+-- Get DeFi protocol metrics
+SELECT dapplooker.get('/api/v1/defi/protocols', {'network': 'ethereum'});
+
+-- Get NFT collection statistics
+SELECT dapplooker.get('/api/v1/nft/collections/stats', {'collection': '0x...'});
+
+-- Get wallet analytics
+SELECT dapplooker.get('/api/v1/wallet/analytics', {'address': '0x...', 'network': 'ethereum'});
+```
+
+## API Documentation
+
+- [DappLooker API Documentation](https://docs.dapplooker.com/)
\ No newline at end of file
diff --git a/macros/marketplace/dappradar/README.md b/macros/marketplace/dappradar/README.md
new file mode 100644
index 0000000..f9962f4
--- /dev/null
+++ b/macros/marketplace/dappradar/README.md
@@ -0,0 +1,36 @@
+# DappRadar API Integration
+
+DappRadar is a leading DApp analytics platform providing comprehensive data on decentralized applications, DeFi protocols, NFT collections, and blockchain games.
+
+## Setup
+
+1. Get your DappRadar API key from [DappRadar API Dashboard](https://dappradar.com/api)
+
+2. Store the API key in Snowflake secrets under `_FSC_SYS/DAPPRADAR`
+
+3. Deploy the DappRadar marketplace functions:
+   ```bash
+   dbt run --models dappradar__ dappradar_utils__dappradar_utils
+   ```
+
+## Functions
+
+### `dappradar.get(path, query_args)`
+Make GET requests to DappRadar API endpoints.
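+
+The wrapper returns the raw Livequery response object rather than the parsed body, so downstream queries typically unwrap it first. A minimal sketch (assuming the standard Livequery envelope with `status_code` and `data` fields used by the other integrations in this repo):
+
+```sql
+-- Unwrap the response envelope before parsing the payload
+-- (sketch: the status_code/data envelope shape is an assumption)
+WITH resp AS (
+    SELECT dappradar.get('/dapps', {'chain': 'ethereum', 'limit': 10}) AS r
+)
+SELECT
+    r:status_code::NUMBER AS status_code,
+    r:data AS payload
+FROM resp;
+```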
+ +## Examples + +```sql +-- Get top DApps by category +SELECT dappradar.get('/dapps', {'chain': 'ethereum', 'category': 'defi', 'limit': 50}); + +-- Get DApp details +SELECT dappradar.get('/dapps/1', {}); + +-- Get NFT collection rankings +SELECT dappradar.get('/nft/collections', {'chain': 'ethereum', 'range': '24h', 'limit': 100}); +``` + +## API Documentation + +- [DappRadar API Documentation](https://docs.dappradar.com/) \ No newline at end of file diff --git a/macros/marketplace/deepnftvalue/README.md b/macros/marketplace/deepnftvalue/README.md new file mode 100644 index 0000000..ad4458d --- /dev/null +++ b/macros/marketplace/deepnftvalue/README.md @@ -0,0 +1,39 @@ +# DeepNFTValue API Integration + +DeepNFTValue provides AI-powered NFT valuation and analytics services, offering price predictions and market insights for NFT collections. + +## Setup + +1. Get your DeepNFTValue API key from [DeepNFTValue Dashboard](https://deepnftvalue.com/dashboard) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/DEEPNFTVALUE` + +3. Deploy the DeepNFTValue marketplace functions: + ```bash + dbt run --models deepnftvalue__ deepnftvalue_utils__deepnftvalue_utils + ``` + +## Functions + +### `deepnftvalue.get(path, query_args)` +Make GET requests to DeepNFTValue API endpoints. + +### `deepnftvalue.post(path, body)` +Make POST requests to DeepNFTValue API endpoints. + +## Examples + +```sql +-- Get NFT valuation +SELECT deepnftvalue.get('/api/v1/valuation', {'contract_address': '0x...', 'token_id': '1234'}); + +-- Get collection analytics +SELECT deepnftvalue.get('/api/v1/collection/analytics', {'contract_address': '0x...'}); + +-- Get price predictions +SELECT deepnftvalue.post('/api/v1/predict', {'contract_address': '0x...', 'token_ids': [1, 2, 3]}); +``` + +## API Documentation + +- [DeepNFTValue API Documentation](https://docs.deepnftvalue.com/) \ No newline at end of file diff --git a/macros/marketplace/defillama/README.md b/macros/marketplace/defillama/README.md new file mode 100644 index 0000000..d047040 --- /dev/null +++ b/macros/marketplace/defillama/README.md @@ -0,0 +1,90 @@ +# DefiLlama API Integration + +DeFi analytics and TVL (Total Value Locked) data integration using DefiLlama's comprehensive DeFi protocol database. + +## Setup + +1. Most DefiLlama endpoints are free and don't require an API key + +2. For premium endpoints, get your API key from [DefiLlama](https://defillama.com/docs/api) + +3. Store the API key in Snowflake secrets under `_FSC_SYS/DEFILLAMA` (if using premium features) + +4. Deploy the DefiLlama marketplace functions: + ```bash + dbt run --models defillama__ defillama_utils__defillama_utils + ``` + +## Functions + +### `defillama.get(path, query_args)` +Make GET requests to DefiLlama API endpoints. 
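+
+Most DefiLlama endpoints return JSON arrays, so a common pattern is to flatten the wrapped response into one row per element. A minimal sketch (assuming the Livequery envelope exposes the JSON body under `:data`; the `name` and `tvl` fields follow DefiLlama's public `/protocols` schema):
+
+```sql
+-- One row per protocol, ranked by TVL
+-- (sketch: the :data envelope and field names are assumptions)
+WITH resp AS (
+    SELECT defillama.get('/protocols', {}) AS r
+)
+SELECT
+    p.value:name::STRING AS protocol,
+    p.value:tvl::FLOAT AS tvl
+FROM resp,
+LATERAL FLATTEN(input => r:data) p
+ORDER BY tvl DESC
+LIMIT 20;
+```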
+ +## Examples + +### Protocol TVL Data +```sql +-- Get current TVL for all protocols +SELECT defillama.get('/protocols', {}); + +-- Get specific protocol information +SELECT defillama.get('/protocol/uniswap', {}); + +-- Get historical TVL for a protocol +SELECT defillama.get('/protocol/aave', {}); +``` + +### Chain TVL Data +```sql +-- Get TVL for all chains +SELECT defillama.get('/chains', {}); + +-- Get historical TVL for Ethereum +SELECT defillama.get('/historicalChainTvl/Ethereum', {}); +``` + +### Yield Farming Data +```sql +-- Get current yields +SELECT defillama.get('/yields', {}); + +-- Get yields for specific protocol +SELECT defillama.get('/yields/project/aave', {}); +``` + +### Token Pricing +```sql +-- Get current token prices +SELECT defillama.get('/prices/current/ethereum:0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD', {}); + +-- Get historical token prices +SELECT defillama.get('/prices/historical/1640995200/ethereum:0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD', {}); +``` + +### Stablecoin Data +```sql +-- Get stablecoin market caps +SELECT defillama.get('/stablecoins', {}); + +-- Get specific stablecoin information +SELECT defillama.get('/stablecoin/1', {}); -- USDT +``` + +### Bridge Data +```sql +-- Get bridge volumes +SELECT defillama.get('/bridges', {}); + +-- Get specific bridge information +SELECT defillama.get('/bridge/1', {}); +``` + +## Rate Limiting + +DefiLlama API is generally rate-limited to prevent abuse. Most endpoints are free to use. + +## API Documentation + +- [DefiLlama API Documentation](https://defillama.com/docs/api) +- [TVL API](https://defillama.com/docs/api#operations-tag-TVL) +- [Yields API](https://defillama.com/docs/api#operations-tag-Yields) \ No newline at end of file diff --git a/macros/marketplace/dune/README.md b/macros/marketplace/dune/README.md new file mode 100644 index 0000000..b9181d0 --- /dev/null +++ b/macros/marketplace/dune/README.md @@ -0,0 +1,74 @@ +# Dune Analytics API Integration + +Access Dune Analytics queries and results directly from Snowflake for blockchain data analysis and visualization. + +## Setup + +1. Get your Dune API key from [Dune Analytics](https://dune.com/settings/api) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/DUNE` + +3. Deploy the Dune marketplace functions: + ```bash + dbt run --models dune__ dune_utils__dune_utils + ``` + +## Functions + +### `dune.get(path, query_args)` +Make GET requests to Dune API endpoints. + +### `dune.post(path, body)` +Make POST requests to Dune API endpoints. 
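+
+Dune executions are asynchronous: `post` starts a run and returns an execution ID, which you then pass to the status and results endpoints shown below. A minimal sketch of chaining the two calls (assuming the execution ID is exposed as `execution_id` under the `:data` envelope):
+
+```sql
+-- Start an execution, then poll its status with the returned ID
+-- (sketch: the :data/execution_id response shape is an assumption)
+WITH execution AS (
+    SELECT dune.post('/api/v1/query/1234567/execute', {}) AS r
+)
+SELECT
+    r:data:execution_id::STRING AS execution_id,
+    dune.get(CONCAT('/api/v1/execution/', r:data:execution_id::STRING, '/status'), {}) AS status_response
+FROM execution;
+```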
+ +## Examples + +### Execute Queries +```sql +-- Execute a Dune query +SELECT dune.post('/api/v1/query/1234567/execute', { + 'query_parameters': { + 'token_address': '0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD' + } +}); +``` + +### Get Query Results +```sql +-- Get results from executed query +SELECT dune.get('/api/v1/execution/01234567-89ab-cdef-0123-456789abcdef/results', {}); + +-- Get latest results for a query +SELECT dune.get('/api/v1/query/1234567/results', {}); +``` + +### Query Status +```sql +-- Check execution status +SELECT dune.get('/api/v1/execution/01234567-89ab-cdef-0123-456789abcdef/status', {}); +``` + +### Parameterized Queries +```sql +-- Execute query with parameters +SELECT dune.post('/api/v1/query/1234567/execute', { + 'query_parameters': { + 'start_date': '2023-01-01', + 'end_date': '2023-12-31', + 'min_amount': 1000 + } +}); +``` + +## Rate Limiting + +Dune API rate limits vary by plan: +- **Free**: 20 executions per day +- **Plus**: 1,000 executions per day +- **Premium**: 10,000 executions per day + +## API Documentation + +- [Dune API Documentation](https://dune.com/docs/api/) +- [Authentication](https://dune.com/docs/api/api-reference/authentication/) +- [Query Execution](https://dune.com/docs/api/api-reference/execute-queries/) \ No newline at end of file diff --git a/macros/marketplace/espn/README.md b/macros/marketplace/espn/README.md new file mode 100644 index 0000000..0394c16 --- /dev/null +++ b/macros/marketplace/espn/README.md @@ -0,0 +1,36 @@ +# ESPN API Integration + +ESPN provides comprehensive sports data including scores, schedules, player statistics, and news across multiple sports leagues. + +## Setup + +1. Get your ESPN API key from [ESPN Developer Portal](https://developer.espn.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/ESPN` + +3. Deploy the ESPN marketplace functions: + ```bash + dbt run --models espn__ espn_utils__espn_utils + ``` + +## Functions + +### `espn.get(path, query_args)` +Make GET requests to ESPN API endpoints. + +## Examples + +```sql +-- Get NFL scores +SELECT espn.get('/v1/sports/football/nfl/scoreboard', {}); + +-- Get NBA team roster +SELECT espn.get('/v1/sports/basketball/nba/teams/1/roster', {}); + +-- Get MLB standings +SELECT espn.get('/v1/sports/baseball/mlb/standings', {}); +``` + +## API Documentation + +- [ESPN API Documentation](https://site.api.espn.com/apis/site/v2/sports/) \ No newline at end of file diff --git a/macros/marketplace/footprint/README.md b/macros/marketplace/footprint/README.md new file mode 100644 index 0000000..942ec2f --- /dev/null +++ b/macros/marketplace/footprint/README.md @@ -0,0 +1,39 @@ +# Footprint Analytics API Integration + +Footprint Analytics provides comprehensive blockchain data analytics with APIs for accessing DeFi, NFT, GameFi, and cross-chain data insights. + +## Setup + +1. Get your Footprint API key from [Footprint Analytics Dashboard](https://www.footprint.network/dashboard) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/FOOTPRINT` + +3. Deploy the Footprint marketplace functions: + ```bash + dbt run --models footprint__ footprint_utils__footprint_utils + ``` + +## Functions + +### `footprint.get(path, query_args)` +Make GET requests to Footprint Analytics API endpoints. + +### `footprint.post(path, body)` +Make POST requests to Footprint Analytics API endpoints. 
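+
+As with the other marketplace wrappers, it is worth checking the HTTP status before parsing the body. A minimal guard sketch (assuming the standard Livequery `status_code`/`data` envelope):
+
+```sql
+-- Only touch the payload when the call succeeded
+-- (sketch: the status_code/data envelope shape is an assumption)
+WITH resp AS (
+    SELECT footprint.get('/api/v1/nft/market/overview', {'timeframe': '7d'}) AS r
+)
+SELECT
+    r:status_code::NUMBER AS status_code,
+    IFF(r:status_code::NUMBER = 200, r:data, NULL) AS payload
+FROM resp;
+```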
+
+## Examples
+
+```sql
+-- Get DeFi protocol TVL data
+SELECT footprint.get('/api/v1/defi/protocol/tvl', {'protocol': 'uniswap', 'chain': 'ethereum'});
+
+-- Get NFT market trends
+SELECT footprint.get('/api/v1/nft/market/overview', {'timeframe': '7d'});
+
+-- Get GameFi protocol statistics
+SELECT footprint.get('/api/v1/gamefi/protocols', {'chain': 'polygon', 'limit': 20});
+```
+
+## API Documentation
+
+- [Footprint Analytics API Documentation](https://docs.footprint.network/)
\ No newline at end of file
diff --git a/macros/marketplace/fred/README.md b/macros/marketplace/fred/README.md
new file mode 100644
index 0000000..42ab730
--- /dev/null
+++ b/macros/marketplace/fred/README.md
@@ -0,0 +1,36 @@
+# FRED API Integration
+
+FRED (Federal Reserve Economic Data) provides access to economic data from the Federal Reserve Bank of St. Louis, including GDP, inflation, employment, and financial market data.
+
+## Setup
+
+1. Get your FRED API key from [FRED API Registration](https://fred.stlouisfed.org/docs/api/api_key.html)
+
+2. Store the API key in Snowflake secrets under `_FSC_SYS/FRED`
+
+3. Deploy the FRED marketplace functions:
+   ```bash
+   dbt run --models fred__ fred_utils__fred_utils
+   ```
+
+## Functions
+
+### `fred.get(path, query_args)`
+Make GET requests to FRED API endpoints. The key stored under `_FSC_SYS/FRED` is supplied automatically, so you do not pass `api_key` in `query_args`.
+
+## Examples
+
+```sql
+-- Get GDP data
+SELECT fred.get('/series/observations', {'series_id': 'GDP'});
+
+-- Get unemployment rate
+SELECT fred.get('/series/observations', {'series_id': 'UNRATE'});
+
+-- Get inflation rate (CPI)
+SELECT fred.get('/series/observations', {'series_id': 'CPIAUCSL'});
+```
+
+## API Documentation
+
+- [FRED API Documentation](https://fred.stlouisfed.org/docs/api/fred/)
\ No newline at end of file
diff --git a/macros/marketplace/github/README.md b/macros/marketplace/github/README.md
new file mode 100644
index 0000000..53b558b
--- /dev/null
+++ b/macros/marketplace/github/README.md
@@ -0,0 +1,668 @@
+# GitHub Actions Integration for Livequery
+
+A comprehensive GitHub Actions integration that provides both scalar functions (UDFs) and table functions (UDTFs) for interacting with GitHub's REST API. Monitor workflows, retrieve logs, trigger dispatches, and analyze CI/CD data directly from your data warehouse.
+
+## Prerequisites & Setup
+
+### Authentication Setup
+
+The integration uses GitHub Personal Access Tokens (PAT) or GitHub App tokens for authentication.
+
+#### Option 1: Personal Access Token (Recommended for Development)
+
+1. Go to [GitHub Settings → Developer settings → Personal access tokens](https://github.com/settings/tokens)
+2. Click "Generate new token (classic)"
+3. Select required scopes:
+   - `repo` - Full control of private repositories
+   - `actions:read` - Read access to Actions (minimum required)
+   - `actions:write` - Write access to Actions (for triggering workflows)
+   - `workflow` - Update GitHub Action workflows (for enable/disable)
+4. Copy the generated token
+5. Store securely in your secrets management system
+
+#### Option 2: GitHub App (Recommended for Production)
+
+1. Create a GitHub App in your organization settings
+2. Grant required permissions:
+   - **Actions**: Read & Write
+   - **Contents**: Read
+   - **Metadata**: Read
+3. Install the app on repositories you want to access
+4. 
Use the app's installation token + +### Environment Setup + +The integration automatically handles authentication through Livequery's secrets management: + +- **System users**: Uses `_FSC_SYS/GITHUB` secret path +- **Regular users**: Uses `vault/github/api` secret path + +## Quick Start + +### 1. List Repository Workflows + +```sql +-- Get all workflows for a repository +SELECT * FROM TABLE( + github_actions.tf_workflows('your-org', 'your-repo') +); + +-- Or as JSON object +SELECT github_actions.workflows('your-org', 'your-repo') as workflows_data; +``` + +### 2. Monitor Workflow Runs + +```sql +-- Get recent workflow runs with status filtering +SELECT * FROM TABLE( + github_actions.tf_runs('your-org', 'your-repo', {'status': 'completed', 'per_page': 10}) +); + +-- Get runs for a specific workflow +SELECT * FROM TABLE( + github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml') +); +``` + +### 3. Analyze Failed Jobs + +```sql +-- Get failed jobs with complete logs for troubleshooting +SELECT + job_name, + job_conclusion, + job_url, + logs +FROM TABLE( + github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', '12345678') +); +``` + +### 4. Trigger Workflow Dispatch + +```sql +-- Trigger a workflow manually +SELECT github_actions.workflow_dispatches( + 'your-org', + 'your-repo', + 'deploy.yml', + { + 'ref': 'main', + 'inputs': { + 'environment': 'staging', + 'debug': 'true' + } + } +) as dispatch_result; +``` + +## Function Reference + +### Utility Functions (`github_utils` schema) + +#### `github_utils.octocat()` +Test GitHub API connectivity and authentication. +```sql +SELECT github_utils.octocat(); +-- Returns: GitHub API response with Octocat ASCII art +``` + +#### `github_utils.headers()` +Get properly formatted GitHub API headers. +```sql +SELECT github_utils.headers(); +-- Returns: '{"Authorization": "Bearer {TOKEN}", ...}' +``` + +#### `github_utils.get_api(route, query)` +Make GET requests to GitHub API. +```sql +SELECT github_utils.get_api('repos/your-org/your-repo', {'per_page': 10}); +``` + +#### `github_utils.post_api(route, data)` +Make POST requests to GitHub API. +```sql +SELECT github_utils.post_api('repos/your-org/your-repo/issues', { + 'title': 'New Issue', + 'body': 'Issue description' +}); +``` + +#### `github_utils.put_api(route, data)` +Make PUT requests to GitHub API. +```sql +SELECT github_utils.put_api('repos/your-org/your-repo/actions/workflows/ci.yml/enable', {}); +``` + +### Workflow Functions (`github_actions` schema) + +#### Scalar Functions (Return JSON Objects) + +##### `github_actions.workflows(owner, repo[, query])` +List repository workflows. +```sql +-- Basic usage +SELECT github_actions.workflows('FlipsideCrypto', 'admin-models'); + +-- With query parameters +SELECT github_actions.workflows('FlipsideCrypto', 'admin-models', {'per_page': 50}); +``` + +##### `github_actions.runs(owner, repo[, query])` +List workflow runs for a repository. +```sql +-- Get recent runs +SELECT github_actions.runs('your-org', 'your-repo'); + +-- Filter by status and branch +SELECT github_actions.runs('your-org', 'your-repo', { + 'status': 'completed', + 'branch': 'main', + 'per_page': 20 +}); +``` + +##### `github_actions.workflow_runs(owner, repo, workflow_id[, query])` +List runs for a specific workflow. 
+```sql +-- Get runs for CI workflow +SELECT github_actions.workflow_runs('your-org', 'your-repo', 'ci.yml'); + +-- With filtering +SELECT github_actions.workflow_runs('your-org', 'your-repo', 'ci.yml', { + 'status': 'failure', + 'per_page': 10 +}); +``` + +##### `github_actions.workflow_dispatches(owner, repo, workflow_id[, body])` +Trigger a workflow dispatch event. +```sql +-- Simple dispatch (uses main branch) +SELECT github_actions.workflow_dispatches('your-org', 'your-repo', 'deploy.yml'); + +-- With custom inputs +SELECT github_actions.workflow_dispatches('your-org', 'your-repo', 'deploy.yml', { + 'ref': 'develop', + 'inputs': { + 'environment': 'staging', + 'version': '1.2.3' + } +}); +``` + +##### `github_actions.workflow_enable(owner, repo, workflow_id)` +Enable a workflow. +```sql +SELECT github_actions.workflow_enable('your-org', 'your-repo', 'ci.yml'); +``` + +##### `github_actions.workflow_disable(owner, repo, workflow_id)` +Disable a workflow. +```sql +SELECT github_actions.workflow_disable('your-org', 'your-repo', 'ci.yml'); +``` + +##### `github_actions.workflow_run_logs(owner, repo, run_id)` +Get download URL for workflow run logs. +```sql +SELECT github_actions.workflow_run_logs('your-org', 'your-repo', '12345678'); +``` + +##### `github_actions.job_logs(owner, repo, job_id)` +Get plain text logs for a specific job. +```sql +SELECT github_actions.job_logs('your-org', 'your-repo', '87654321'); +``` + +##### `github_actions.workflow_run_jobs(owner, repo, run_id[, query])` +List jobs for a workflow run. +```sql +-- Get all jobs +SELECT github_actions.workflow_run_jobs('your-org', 'your-repo', '12345678'); + +-- Filter to latest attempt only +SELECT github_actions.workflow_run_jobs('your-org', 'your-repo', '12345678', { + 'filter': 'latest' +}); +``` + +#### Table Functions (Return Structured Data) + +##### `github_actions.tf_workflows(owner, repo[, query])` +List workflows as structured table data. +```sql +SELECT + id, + name, + path, + state, + created_at, + updated_at, + badge_url, + html_url +FROM TABLE(github_actions.tf_workflows('your-org', 'your-repo')); +``` + +##### `github_actions.tf_runs(owner, repo[, query])` +List workflow runs as structured table data. +```sql +SELECT + id, + name, + status, + conclusion, + head_branch, + head_sha, + run_number, + event, + created_at, + updated_at, + html_url +FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 20})); +``` + +##### `github_actions.tf_workflow_runs(owner, repo, workflow_id[, query])` +List runs for a specific workflow as structured table data. +```sql +SELECT + id, + name, + status, + conclusion, + run_number, + head_branch, + created_at, + html_url +FROM TABLE(github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml')); +``` + +##### `github_actions.tf_workflow_run_jobs(owner, repo, run_id[, query])` +List jobs for a workflow run as structured table data. +```sql +SELECT + id, + name, + status, + conclusion, + started_at, + completed_at, + runner_name, + runner_group_name, + html_url +FROM TABLE(github_actions.tf_workflow_run_jobs('your-org', 'your-repo', '12345678')); +``` + +##### `github_actions.tf_failed_jobs_with_logs(owner, repo, run_id)` +Get failed jobs with their complete logs for analysis. 
+```sql
+SELECT
+    job_id,
+    job_name,
+    job_status,
+    job_conclusion,
+    job_url,
+    failed_steps,
+    logs
+FROM TABLE(github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', '12345678'));
+```
+
+## Advanced Usage Examples
+
+### CI/CD Monitoring Dashboard
+
+```sql
+-- Recent workflow runs with failure rate
+WITH recent_runs AS (
+    SELECT
+        name,
+        status,
+        conclusion,
+        head_branch,
+        created_at,
+        html_url
+    FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 100}))
+    WHERE created_at >= CURRENT_DATE - 7
+)
+SELECT
+    name,
+    COUNT(*) as total_runs,
+    COUNT(CASE WHEN conclusion = 'success' THEN 1 END) as successful_runs,
+    COUNT(CASE WHEN conclusion = 'failure' THEN 1 END) as failed_runs,
+    ROUND(COUNT(CASE WHEN conclusion = 'failure' THEN 1 END) * 100.0 / COUNT(*), 2) as failure_rate_pct
+FROM recent_runs
+GROUP BY name
+ORDER BY failure_rate_pct DESC;
+```
+
+### Failed Job Analysis
+
+#### Multi-Run Failure Analysis
+```sql
+-- Analyze failures across multiple runs
+WITH failed_jobs AS (
+    SELECT
+        r.id as run_id,
+        r.name as workflow_name,
+        r.head_branch,
+        r.created_at as run_created_at,
+        j.job_name,
+        j.job_conclusion,
+        j.logs
+    FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'status': 'completed'})) r
+    CROSS JOIN TABLE(github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', r.id::TEXT)) j
+    WHERE r.conclusion = 'failure'
+        AND r.created_at >= CURRENT_DATE - 3
+)
+SELECT
+    workflow_name,
+    job_name,
+    COUNT(*) as failure_count,
+    ARRAY_AGG(DISTINCT head_branch) as affected_branches,
+    ARRAY_SLICE(ARRAY_AGG(logs), 0, 3) as sample_logs
+FROM failed_jobs
+GROUP BY workflow_name, job_name
+ORDER BY failure_count DESC;
+```
+
+#### Specific Job Log Analysis
+```sql
+-- Get detailed logs for a specific failed job
+WITH specific_job AS (
+    SELECT
+        id as job_id,
+        name as job_name,
+        status,
+        conclusion,
+        started_at,
+        completed_at,
+        html_url,
+        steps
+    FROM TABLE(github_actions.tf_workflow_run_jobs('your-org', 'your-repo', '12345678'))
+    WHERE name = 'Build and Test' -- Specify the job name you want to analyze
+        AND conclusion = 'failure'
+)
+SELECT
+    job_id,
+    job_name,
+    status,
+    conclusion,
+    started_at,
+    completed_at,
+    html_url,
+    steps,
+    github_actions.job_logs('your-org', 'your-repo', job_id::TEXT) as full_logs
+FROM specific_job;
+```
+
+#### From Workflow ID to Failed Logs
+```sql
+-- Complete workflow: Workflow ID → Run ID → Failed Logs
+WITH latest_failed_run AS (
+    -- Step 1: Get the most recent failed run for your workflow
+    SELECT
+        id as run_id,
+        name as workflow_name,
+        status,
+        conclusion,
+        head_branch,
+        head_sha,
+        created_at,
+        html_url as run_url
+    FROM TABLE(github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml')) -- Your workflow ID here
+    WHERE conclusion = 'failure'
+    ORDER BY created_at DESC
+    LIMIT 1
+),
+failed_jobs_with_logs AS (
+    -- Step 2: Get all failed jobs and their logs for that run
+    SELECT
+        r.run_id,
+        r.workflow_name,
+        r.head_branch,
+        r.head_sha,
+        r.created_at,
+        r.run_url,
+        j.job_id,
+        j.job_name,
+        j.job_status,
+        j.job_conclusion,
+        j.job_url,
+        j.failed_steps,
+        j.logs
+    FROM latest_failed_run r
+    CROSS JOIN TABLE(github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', r.run_id::TEXT)) j
+)
+SELECT
+    run_id,
+    workflow_name,
+    head_branch,
+    created_at,
+    run_url,
+    job_name,
+    job_url,
+    -- Extract key error information from logs
+    CASE
+        WHEN CONTAINS(logs, 'npm ERR!') THEN 'NPM Error'
+        WHEN CONTAINS(logs, 'fatal:') THEN 'Git Error'
+        WHEN CONTAINS(logs, 'Error:
Process completed with exit code') THEN 'Process Exit Error'
+        WHEN CONTAINS(logs, 'timeout') THEN 'Timeout Error'
+        ELSE 'Other Error'
+    END as error_type,
+    -- Get first error line from logs
+    REGEXP_SUBSTR(logs, '.*Error[^\\n]*', 1, 1) as first_error_line,
+    -- Full logs for detailed analysis
+    logs as full_logs
+FROM failed_jobs_with_logs
+ORDER BY job_name;
+```
+
+#### Quick Workflow ID to Run ID Lookup
+```sql
+-- Simple: Just get run IDs for a specific workflow
+SELECT
+    id as run_id,
+    status,
+    conclusion,
+    head_branch,
+    created_at,
+    html_url
+FROM TABLE(github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml')) -- Replace with your workflow ID
+WHERE conclusion = 'failure'
+ORDER BY created_at DESC
+LIMIT 5;
+```
+
+#### Failed Steps Deep Dive
+```sql
+-- Analyze failed steps within jobs and extract error patterns
+WITH job_details AS (
+    SELECT
+        id as job_id,
+        name as job_name,
+        conclusion,
+        steps,
+        github_actions.job_logs('your-org', 'your-repo', id::TEXT) as logs
+    FROM TABLE(github_actions.tf_workflow_run_jobs('your-org', 'your-repo', '12345678'))
+    WHERE conclusion = 'failure'
+),
+failed_steps AS (
+    SELECT
+        job_id,
+        job_name,
+        step.value:name::STRING as step_name,
+        step.value:conclusion::STRING as step_conclusion,
+        step.value:number::INTEGER as step_number,
+        logs
+    FROM job_details,
+        LATERAL FLATTEN(input => steps) step
+    WHERE step.value:conclusion::STRING = 'failure'
+)
+SELECT
+    job_name,
+    step_name,
+    step_number,
+    step_conclusion,
+    -- Extract error messages from logs (first 1000 chars)
+    SUBSTR(logs, GREATEST(1, CHARINDEX('Error:', logs) - 50), 1000) as error_context,
+    -- Extract common error patterns
+    CASE
+        WHEN CONTAINS(logs, 'npm ERR!') THEN 'NPM Error'
+        WHEN CONTAINS(logs, 'fatal:') THEN 'Git Error'
+        WHEN CONTAINS(logs, 'Error: Process completed with exit code') THEN 'Process Exit Error'
+        WHEN CONTAINS(logs, 'timeout') THEN 'Timeout Error'
+        WHEN CONTAINS(logs, 'permission denied') THEN 'Permission Error'
+        ELSE 'Other Error'
+    END as error_category
+FROM failed_steps
+ORDER BY job_name, step_number;
+```
+
+### Workflow Performance Metrics
+
+```sql
+-- Average workflow duration by branch
+SELECT
+    head_branch,
+    AVG(DATEDIFF(second, run_started_at, updated_at)) as avg_duration_seconds,
+    COUNT(*) as run_count,
+    COUNT(CASE WHEN conclusion = 'success' THEN 1 END) as success_count
+FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 200}))
+WHERE run_started_at IS NOT NULL
+    AND updated_at IS NOT NULL
+    AND status = 'completed'
+    AND created_at >= CURRENT_DATE - 30
+GROUP BY head_branch
+ORDER BY avg_duration_seconds DESC;
+```
+
+### Automated Workflow Management
+
+```sql
+-- Conditionally trigger deployment based on main branch success
+WITH latest_main_run AS (
+    SELECT
+        id,
+        conclusion,
+        head_sha,
+        created_at
+    FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {
+        'branch': 'main',
+        'per_page': 1
+    }))
+    ORDER BY created_at DESC
+    LIMIT 1
+)
+SELECT
+    CASE
+        WHEN conclusion = 'success' THEN
+            github_actions.workflow_dispatches('your-org', 'your-repo', 'deploy.yml', {
+                'ref': 'main',
+                'inputs': {'sha': head_sha}
+            })
+        ELSE
+            OBJECT_CONSTRUCT('skipped', true, 'reason', 'main branch tests failed')
+    END as deployment_result
+FROM latest_main_run;
+```
+
+## Error Handling
+
+All functions return structured responses with error information:
+
+```sql
+-- Check for API errors
+WITH api_response AS (
+    SELECT github_actions.workflows('invalid-org', 'invalid-repo') as response
+)
+SELECT
response:status_code as status_code, + response:error as error_message, + response:data as data +FROM api_response; +``` + +Common HTTP status codes: +- **200**: Success +- **401**: Unauthorized (check token permissions) +- **403**: Forbidden (check repository access) +- **404**: Not found (check org/repo/workflow names) +- **422**: Validation failed (check input parameters) + +## Rate Limiting + +GitHub API has rate limits: +- **Personal tokens**: 5,000 requests per hour +- **GitHub App tokens**: 5,000 requests per hour per installation +- **Search API**: 30 requests per minute + +The functions automatically handle rate limiting through Livequery's retry mechanisms. + +## Security Best Practices + +1. **Use minimal permissions**: Only grant necessary scopes to tokens +2. **Rotate tokens regularly**: Set expiration dates and rotate tokens +3. **Use GitHub Apps for production**: More secure than personal access tokens +4. **Monitor usage**: Track API calls to avoid rate limits +5. **Secure storage**: Use proper secrets management for tokens + +## Troubleshooting + +### Common Issues + +**Authentication Errors (401)** +```sql +-- Test authentication +SELECT github_utils.octocat(); +-- Should return status_code = 200 if token is valid +``` + +**Permission Errors (403)** +- Ensure token has required scopes (`actions:read` minimum) +- Check if repository is accessible to the token owner +- For private repos, ensure `repo` scope is granted + +**Workflow Not Found (404)** +```sql +-- List available workflows first +SELECT * FROM TABLE(github_actions.tf_workflows('your-org', 'your-repo')); +``` + +**Rate Limiting (403 with rate limit message)** +- Implement request spacing in your queries +- Use pagination parameters to reduce request frequency +- Monitor your rate limit status + +### Performance Tips + +1. **Use table functions for analytics**: More efficient for large datasets +2. **Implement pagination**: Use `per_page` parameter to control response size +3. **Cache results**: Store frequently accessed data in tables +4. **Filter at API level**: Use query parameters instead of SQL WHERE clauses +5. **Batch operations**: Combine multiple API calls where possible + +## GitHub API Documentation + +- [GitHub REST API](https://docs.github.com/en/rest) - Complete API reference +- [Actions API](https://docs.github.com/en/rest/actions) - Actions-specific endpoints +- [Authentication](https://docs.github.com/en/rest/overview/authenticating-to-the-rest-api) - Token setup and permissions +- [Rate Limiting](https://docs.github.com/en/rest/overview/rate-limits-for-the-rest-api) - API limits and best practices + +## Function Summary + +| Function | Type | Purpose | +|----------|------|---------| +| `github_utils.octocat()` | UDF | Test API connectivity | +| `github_utils.get_api/post_api/put_api()` | UDF | Generic API requests | +| `github_actions.workflows()` | UDF | List workflows (JSON) | +| `github_actions.runs()` | UDF | List runs (JSON) | +| `github_actions.workflow_runs()` | UDF | List workflow runs (JSON) | +| `github_actions.workflow_dispatches()` | UDF | Trigger workflows | +| `github_actions.workflow_enable/disable()` | UDF | Control workflow state | +| `github_actions.*_logs()` | UDF | Retrieve logs | +| `github_actions.tf_*()` | UDTF | Structured table data | +| `github_actions.tf_failed_jobs_with_logs()` | UDTF | Failed job analysis | + +Ready to monitor and automate your GitHub Actions workflows directly from your data warehouse! 
diff --git a/macros/marketplace/github/actions_udfs.yaml.sql b/macros/marketplace/github/actions_udfs.yaml.sql index 7791e68..c6d5977 100644 --- a/macros/marketplace/github/actions_udfs.yaml.sql +++ b/macros/marketplace/github/actions_udfs.yaml.sql @@ -13,7 +13,7 @@ COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$ sql: | SELECT - {{ utils_schema_name }}.GET( + {{ utils_schema_name }}.get_api( CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows'), query ):data::OBJECT @@ -40,7 +40,7 @@ COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$ sql: | SELECT - {{ utils_schema_name }}.GET( + {{ utils_schema_name }}.get_api( CONCAT_WS('/', 'repos', owner, repo, 'actions/runs'), query ):data::OBJECT @@ -68,7 +68,7 @@ COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$ sql: | SELECT - {{ utils_schema_name }}.GET( + {{ utils_schema_name }}.get_api( CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'runs'), query ):data::OBJECT @@ -97,7 +97,7 @@ COMMENT = $$You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#create-a-workflow-dispatch-event).$$ sql: | SELECT - {{ utils_schema_name }}.POST( + {{ utils_schema_name }}.post_api( CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'dispatches'), COALESCE(body, {'ref': 'main'})::OBJECT )::OBJECT @@ -126,7 +126,7 @@ COMMENT = $$Enables a workflow. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/reference/actions#enable-a-workflow).$$ sql: | SELECT - {{ utils_schema_name }}.PUT( + {{ utils_schema_name }}.put_api( CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'enable'), {} )::OBJECT @@ -141,8 +141,70 @@ COMMENT = $$Disables a workflow. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/reference/actions#disable-a-workflow).$$ sql: | SELECT - {{ utils_schema_name }}.PUT( + {{ utils_schema_name }}.put_api( CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'disable'), {} )::OBJECT + +- name: {{ schema_name -}}.workflow_run_logs + signature: + - [owner, "TEXT"] + - [repo, "TEXT"] + - [run_id, "TEXT"] + return_type: + - "TEXT" + options: | + COMMENT = $$Download workflow run logs as a ZIP archive. Gets a redirect URL to the actual log archive. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#download-workflow-run-logs).$$ + sql: | + SELECT + {{ utils_schema_name }}.get_api( + CONCAT_WS('/', 'repos', owner, repo, 'actions/runs', run_id, 'logs'), + {} + ):data::TEXT + +- name: {{ schema_name -}}.job_logs + signature: + - [owner, "TEXT"] + - [repo, "TEXT"] + - [job_id, "TEXT"] + return_type: + - "TEXT" + options: | + COMMENT = $$Download job logs. 
Gets the plain text logs for a specific job. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#download-job-logs-for-a-workflow-run).$$ + sql: | + SELECT + {{ utils_schema_name }}.get_api( + CONCAT_WS('/', 'repos', owner, repo, 'actions/jobs', job_id, 'logs'), + {} + ):data::TEXT + +- name: {{ schema_name -}}.workflow_run_jobs + signature: + - [owner, "TEXT"] + - [repo, "TEXT"] + - [run_id, "TEXT"] + - [query, "OBJECT"] + return_type: + - "OBJECT" + options: | + COMMENT = $$Lists jobs for a workflow run. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$ + sql: | + SELECT + {{ utils_schema_name }}.get_api( + CONCAT_WS('/', 'repos', owner, repo, 'actions/runs', run_id, 'jobs'), + query + ):data::OBJECT +- name: {{ schema_name -}}.workflow_run_jobs + signature: + - [owner, "TEXT"] + - [repo, "TEXT"] + - [run_id, "TEXT"] + return_type: + - "OBJECT" + options: | + COMMENT = $$Lists jobs for a workflow run. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$ + sql: | + SELECT + {{ schema_name -}}.workflow_run_jobs(owner, repo, run_id, {}) + {% endmacro %} \ No newline at end of file diff --git a/macros/marketplace/github/actions_udtfs.yml.sql b/macros/marketplace/github/actions_udtfs.yml.sql index fded7ba..512978c 100644 --- a/macros/marketplace/github/actions_udtfs.yml.sql +++ b/macros/marketplace/github/actions_udtfs.yml.sql @@ -166,4 +166,193 @@ SELECT * FROM TABLE({{ schema_name -}}.tf_workflow_runs(owner, repo, WORKFLKOW_ID, {})) -{% endmacro %} \ No newline at end of file +- name: {{ schema_name -}}.tf_workflow_run_jobs + signature: + - [owner, "TEXT"] + - [repo, "TEXT"] + - [run_id, "TEXT"] + - [query, "OBJECT"] + return_type: + - "TABLE(id NUMBER, run_id NUMBER, workflow_name STRING, head_branch STRING, run_url STRING, run_attempt NUMBER, node_id STRING, head_sha STRING, url STRING, html_url STRING, status STRING, conclusion STRING, created_at TIMESTAMP, started_at TIMESTAMP, completed_at TIMESTAMP, name STRING, check_run_url STRING, labels VARIANT, runner_id NUMBER, runner_name STRING, runner_group_id NUMBER, runner_group_name STRING, steps VARIANT)" + options: | + COMMENT = $$Lists jobs for a workflow run as a table. 
[Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$ + sql: | + WITH response AS + ( + SELECT + github_actions.workflow_run_jobs(OWNER, REPO, RUN_ID, QUERY) AS response + ) + SELECT + value:id::NUMBER AS id + ,value:run_id::NUMBER AS run_id + ,value:workflow_name::STRING AS workflow_name + ,value:head_branch::STRING AS head_branch + ,value:run_url::STRING AS run_url + ,value:run_attempt::NUMBER AS run_attempt + ,value:node_id::STRING AS node_id + ,value:head_sha::STRING AS head_sha + ,value:url::STRING AS url + ,value:html_url::STRING AS html_url + ,value:status::STRING AS status + ,value:conclusion::STRING AS conclusion + ,value:created_at::TIMESTAMP AS created_at + ,value:started_at::TIMESTAMP AS started_at + ,value:completed_at::TIMESTAMP AS completed_at + ,value:name::STRING AS name + ,value:check_run_url::STRING AS check_run_url + ,value:labels::VARIANT AS labels + ,value:runner_id::NUMBER AS runner_id + ,value:runner_name::STRING AS runner_name + ,value:runner_group_id::NUMBER AS runner_group_id + ,value:runner_group_name::STRING AS runner_group_name + ,value:steps::VARIANT AS steps + FROM response, LATERAL FLATTEN( input=> response:jobs) + +- name: {{ schema_name -}}.tf_workflow_run_jobs + signature: + - [owner, "TEXT"] + - [repo, "TEXT"] + - [run_id, "TEXT"] + return_type: + - "TABLE(id NUMBER, run_id NUMBER, workflow_name STRING, head_branch STRING, run_url STRING, run_attempt NUMBER, node_id STRING, head_sha STRING, url STRING, html_url STRING, status STRING, conclusion STRING, created_at TIMESTAMP, started_at TIMESTAMP, completed_at TIMESTAMP, name STRING, check_run_url STRING, labels VARIANT, runner_id NUMBER, runner_name STRING, runner_group_id NUMBER, runner_group_name STRING, steps VARIANT)" + options: | + COMMENT = $$Lists jobs for a workflow run as a table. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$ + sql: | + SELECT * + FROM TABLE({{ schema_name -}}.tf_workflow_run_jobs(owner, repo, run_id, {})) + +- name: {{ schema_name -}}.tf_failed_jobs_with_logs + signature: + - [owner, "TEXT"] + - [repo, "TEXT"] + - [run_id, "TEXT"] + return_type: + - "TABLE(run_id STRING, job_id NUMBER, job_name STRING, job_status STRING, job_conclusion STRING, job_url STRING, workflow_name STRING, failed_steps VARIANT, logs TEXT, failed_step_logs ARRAY)" + options: | + COMMENT = $$Gets failed jobs for a workflow run with their complete logs. 
Combines job info with log content for analysis.$$
+  sql: |
+    WITH failed_jobs AS (
+      SELECT
+        run_id::STRING AS run_id,
+        id AS job_id,
+        name AS job_name,
+        status AS job_status,
+        conclusion AS job_conclusion,
+        html_url AS job_url,
+        workflow_name,
+        steps AS failed_steps
+      FROM TABLE({{ schema_name -}}.tf_workflow_run_jobs(owner, repo, run_id))
+      WHERE conclusion = 'failure'
+    ),
+    jobs_with_logs AS (
+      SELECT
+        run_id,
+        job_id,
+        job_name,
+        job_status,
+        job_conclusion,
+        job_url,
+        workflow_name,
+        failed_steps,
+        {{ schema_name -}}.job_logs(owner, repo, job_id::TEXT) AS logs
+      FROM failed_jobs
+    ),
+    error_sections AS (
+      SELECT
+        run_id,
+        job_id,
+        job_name,
+        job_status,
+        job_conclusion,
+        job_url,
+        workflow_name,
+        failed_steps,
+        logs,
+        ARRAY_AGG(section.value) AS failed_step_logs
+      FROM jobs_with_logs,
+        LATERAL FLATTEN(INPUT => SPLIT(logs, '##[group]')) section
+      WHERE CONTAINS(section.value, '##[error]')
+      GROUP BY run_id, job_id, job_name, job_status, job_conclusion, job_url, workflow_name, failed_steps, logs
+    )
+    SELECT
+      j.run_id,
+      j.job_id,
+      j.job_name,
+      j.job_status,
+      j.job_conclusion,
+      j.job_url,
+      j.workflow_name,
+      j.failed_steps,
+      j.logs,
+      COALESCE(e.failed_step_logs, ARRAY_CONSTRUCT()) AS failed_step_logs
+    FROM jobs_with_logs j
+    LEFT JOIN error_sections e
+      ON j.run_id = e.run_id
+      AND j.job_id = e.job_id
+
+- name: {{ schema_name -}}.tf_failure_analysis_with_ai
+  signature:
+    - [owner, "TEXT", "GitHub repository owner/organization name"]
+    - [repo, "TEXT", "GitHub repository name"]
+    - [run_id, "TEXT", "GitHub Actions run ID to analyze"]
+    - [ai_provider, "TEXT", "AI provider to use: 'cortex' (Snowflake built-in AI)"]
+    - [model_name, "STRING", "Model name (required): 'mistral-large', 'mistral-7b', 'llama2-70b-chat', 'mixtral-8x7b'"]
+    - [ai_prompt, "STRING", "Custom AI analysis prompt. Leave empty to use default failure analysis prompt."]
+  return_type:
+    - "TABLE(run_id STRING, ai_analysis STRING, total_failures NUMBER, failure_metadata ARRAY)"
+  options: |
+    COMMENT = $$Gets GitHub Actions failure analysis using Snowflake Cortex AI with custom prompts for Slack notifications.$$
+  sql: |
+    WITH failure_data AS (
+      SELECT
+        run_id,
+        COUNT(*) as total_failures,
+        ARRAY_AGG(OBJECT_CONSTRUCT(
+          'workflow_name', workflow_name,
+          'run_id', run_id,
+          'job_name', job_name,
+          'job_id', job_id,
+          'job_url', job_url,
+          'error_sections', ARRAY_SIZE(failed_step_logs),
+          'logs_preview', ARRAY_TO_STRING(failed_step_logs, '\n')
+        )) as failure_metadata,
+        LISTAGG(
+          CONCAT(
+            'Workflow: ', workflow_name, '\n',
+            'Job: ', job_name, '\n',
+            'Job ID: ', job_id, '\n',
+            'Run ID: ', run_id, '\n',
+            'Error: ', ARRAY_TO_STRING(failed_step_logs, '\n')
+          ),
+          '\n\n---\n\n'
+        ) WITHIN GROUP (ORDER BY job_name) as job_details
+      FROM TABLE({{ schema_name -}}.tf_failed_jobs_with_logs(owner, repo, run_id))
+      GROUP BY run_id
+    )
+    SELECT
+      run_id::STRING,
+      snowflake.cortex.complete(
+        model_name,
+        CONCAT(
+          COALESCE(
+            NULLIF(ai_prompt, ''),
+            'Analyze these GitHub Actions failures and provide:\n1. Common failure patterns\n2. Root cause analysis\n3. 
Prioritized action items\n\nKeep it concise with 1-2 sentences per section in markdown format.\n\n' + ), + job_details + ) + ) as ai_analysis, + total_failures, + failure_metadata + FROM failure_data + +- name: {{ schema_name -}}.tf_failure_analysis_with_ai + signature: + - [owner, "TEXT"] + - [repo, "TEXT"] + - [run_id, "TEXT"] + return_type: + - "TABLE(run_id STRING, ai_analysis STRING, total_failures NUMBER, failure_metadata ARRAY)" + options: | + COMMENT = $$Gets GitHub Actions failure analysis with default AI provider (cortex) for Slack notifications.$$ + sql: | + SELECT * FROM TABLE({{ schema_name -}}.tf_failure_analysis_with_ai(owner, repo, run_id, 'cortex', 'mistral-large', '')) + +{% endmacro %} diff --git a/macros/marketplace/github/utils_udfs.yaml.sql b/macros/marketplace/github/utils_udfs.yaml.sql index dfc9325..f2dc1d2 100644 --- a/macros/marketplace/github/utils_udfs.yaml.sql +++ b/macros/marketplace/github/utils_udfs.yaml.sql @@ -10,16 +10,27 @@ options: | COMMENT = $$Verify token [Authenticating to the REST API](https://docs.github.com/en/rest/overview/authenticating-to-the-rest-api?apiVersion=2022-11-28).$$ sql: | - SELECT + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'GET', + 'https://api.github.com/octocat', + {'Authorization': 'Bearer {TOKEN}', 'X-GitHub-Api-Version': '2022-11-28', 'fsc-quantum-execution-mode': 'async'}, + {}, + IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api'), + TRUE + ) + {%- else -%} live.udf_api( 'GET', 'https://api.github.com/octocat', - {'Authorization': 'Bearer {TOKEN}', - 'X-GitHub-Api-Version': '2022-11-28'}, + {'Authorization': 'Bearer {TOKEN}', 'X-GitHub-Api-Version': '2022-11-28'}, {}, - IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'vault/github/api') - - ) as response + IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api') + ) + {%- endif %} + as response - name: {{ schema_name -}}.headers signature: [] @@ -32,9 +43,10 @@ sql: | SELECT '{"Authorization": "Bearer {TOKEN}", "X-GitHub-Api-Version": "2022-11-28", - "Accept": "application/vnd.github+json"}' + "Accept": "application/vnd.github+json" + }' -- name: {{ schema_name -}}.get +- name: {{ schema_name -}}.get_api signature: - [route, "TEXT"] - [query, "OBJECT"] @@ -43,15 +55,28 @@ options: | COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use parameters to narrow the list of results. 
[Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$ sql: | - SELECT + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'GET', + CONCAT_WS('/', 'https://api.github.com', route || '?') || utils.udf_urlencode(query), + PARSE_JSON({{ schema_name -}}.headers()), + {}, + IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api'), + TRUE + ) + {%- else -%} live.udf_api( 'GET', CONCAT_WS('/', 'https://api.github.com', route || '?') || utils.udf_urlencode(query), PARSE_JSON({{ schema_name -}}.headers()), {}, - IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'vault/github/api') - ) -- name: {{ schema_name -}}.post + IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api') + ) + {%- endif %} + as response +- name: {{ schema_name -}}.post_api signature: - [route, "TEXT"] - [data, "OBJECT"] @@ -60,15 +85,28 @@ options: | COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$ sql: | - SELECT + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'POST', + CONCAT_WS('/', 'https://api.github.com', route), + PARSE_JSON({{ schema_name -}}.headers()), + data, + IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api'), + TRUE + ) + {%- else -%} live.udf_api( 'POST', CONCAT_WS('/', 'https://api.github.com', route), PARSE_JSON({{ schema_name -}}.headers()), data, - IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'vault/github/api') - ) -- name: {{ schema_name -}}.put + IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api') + ) + {%- endif %} + as response +- name: {{ schema_name -}}.put_api signature: - [route, "TEXT"] - [data, "OBJECT"] @@ -77,12 +115,25 @@ options: | COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$ sql: | - SELECT + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'PUT', + CONCAT_WS('/', 'https://api.github.com', route), + PARSE_JSON({{ schema_name -}}.headers()), + data, + IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api'), + TRUE + ) + {%- else -%} live.udf_api( 'PUT', CONCAT_WS('/', 'https://api.github.com', route), PARSE_JSON({{ schema_name -}}.headers()), data, - IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'vault/github/api') - ) -{% endmacro %} \ No newline at end of file + IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api') + ) + {%- endif %} + as response +{% endmacro %} diff --git a/macros/marketplace/helius/README.md b/macros/marketplace/helius/README.md new file mode 100644 index 0000000..a3caa9f --- /dev/null +++ b/macros/marketplace/helius/README.md @@ -0,0 +1,44 @@ +# Helius API Integration + +Helius provides high-performance Solana RPC infrastructure and enhanced APIs for accessing Solana blockchain data, including DAS (Digital Asset Standard) APIs. + +## Setup + +1. 
Get your Helius API key from [Helius Dashboard](https://dashboard.helius.dev/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/HELIUS` + +3. Deploy the Helius marketplace functions: + ```bash + dbt run --models helius__ helius_utils__helius_utils + ``` + +## Functions + +### `helius.get(path, query_args)` +Make GET requests to Helius API endpoints. + +### `helius.post(path, body)` +Make POST requests to Helius API endpoints. + +## Examples + +```sql +-- Get Solana account info +SELECT helius.post('/rpc', { + 'jsonrpc': '2.0', + 'method': 'getAccountInfo', + 'params': ['account_address'], + 'id': 1 +}); + +-- Get compressed NFTs by owner +SELECT helius.get('/v0/addresses/owner_address/nfts', {'compressed': true}); + +-- Get transaction history +SELECT helius.get('/v0/addresses/address/transactions', {'limit': 100}); +``` + +## API Documentation + +- [Helius API Documentation](https://docs.helius.dev/) \ No newline at end of file diff --git a/macros/marketplace/helius/apis_udfs.yaml.sql b/macros/marketplace/helius/apis_udfs.yaml.sql index d2c4c80..894f140 100644 --- a/macros/marketplace/helius/apis_udfs.yaml.sql +++ b/macros/marketplace/helius/apis_udfs.yaml.sql @@ -22,18 +22,37 @@ options: | COMMENT = $$Returns the native Solana balance (in lamports) and all token balances for a given address. [Helius docs here](https://docs.helius.xyz/solana-apis/balances-api).$$ sql: | - SELECT live.udf_api( - 'GET', - CASE - WHEN NETWORK = 'devnet' THEN - concat('https://api-devnet.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}') - ELSE - concat('https://api.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}') - END, - {}, - {}, - '_FSC_SYS/HELIUS' - ) as response + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'GET', + CASE + WHEN NETWORK = 'devnet' THEN + concat('https://api-devnet.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}') + ELSE + concat('https://api.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}') + END, + {'fsc-quantum-execution-mode': 'async'}, + {}, + '_FSC_SYS/HELIUS', + TRUE + ) + {%- else -%} + live.udf_api( + 'GET', + CASE + WHEN NETWORK = 'devnet' THEN + concat('https://api-devnet.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}') + ELSE + concat('https://api.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}') + END, + {}, + {}, + '_FSC_SYS/HELIUS' + ) + {%- endif %} + as response - name: {{ schema_name -}}.parse_transactions signature: @@ -44,6 +63,6 @@ options: | COMMENT = $$Returns an array of enriched, human-readable transactions of the given transaction signatures. Up to 100 transactions per call. 
[Helius docs here](https://docs.helius.xyz/solana-apis/enhanced-transactions-api/parse-transaction-s).$$ sql: | - SELECT {{ utils_schema_name -}}.post(NETWORK, '/v0/transactions', {'transactions': TRANSACTIONS}) as response + SELECT {{ utils_schema_name -}}.post_api(NETWORK, '/v0/transactions', {'transactions': TRANSACTIONS}) as response {% endmacro %} \ No newline at end of file diff --git a/macros/marketplace/helius/helius_config_utils.sql b/macros/marketplace/helius/helius_config_utils.sql index 220aaa1..5c76d75 100644 --- a/macros/marketplace/helius/helius_config_utils.sql +++ b/macros/marketplace/helius/helius_config_utils.sql @@ -3,9 +3,9 @@ SELECT {{ schema_name -}}.rpc(NETWORK, '{{method}}', PARAMS) as response {% endmacro %} {% macro helius_get_call(schema_name, path) %} -SELECT {{ schema_name -}}.get(NETWORK, '{{path}}', QUERY_PARAMS) as response +SELECT {{ schema_name -}}.get_api(NETWORK, '{{path}}', QUERY_PARAMS) as response {% endmacro %} {% macro helius_post_call(schema_name, path) %} -SELECT {{ schema_name -}}.post(NETWORK, '{{path}}', BODY) as response +SELECT {{ schema_name -}}.post_api(NETWORK, '{{path}}', BODY) as response {% endmacro %} \ No newline at end of file diff --git a/macros/marketplace/helius/util_udfs.yaml.sql b/macros/marketplace/helius/util_udfs.yaml.sql index 93d4d45..947f6dd 100644 --- a/macros/marketplace/helius/util_udfs.yaml.sql +++ b/macros/marketplace/helius/util_udfs.yaml.sql @@ -3,7 +3,7 @@ This macro is used to generate the Helius base endpoints #} -- name: {{ schema -}}.get +- name: {{ schema_name }}.get_api signature: - [NETWORK, STRING, The network 'devnet' or 'mainnet'] - [PATH, STRING, The API path starting with '/'] @@ -13,20 +13,39 @@ options: | COMMENT = $$Used to issue an HTTP GET request to Helius.$$ sql: | - SELECT live.udf_api( - 'GET', - CASE - WHEN NETWORK = 'devnet' THEN - concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS)) - ELSE - concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS)) - END, - {}, - {}, - '_FSC_SYS/HELIUS' - ) as response + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'GET', + CASE + WHEN NETWORK = 'devnet' THEN + concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS)) + ELSE + concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS)) + END, + {'fsc-quantum-execution-mode': 'async'}, + {}, + '_FSC_SYS/HELIUS', + TRUE + ) + {%- else -%} + live.udf_api( + 'GET', + CASE + WHEN NETWORK = 'devnet' THEN + concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS)) + ELSE + concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS)) + END, + {}, + {}, + '_FSC_SYS/HELIUS' + ) + {%- endif %} + as response -- name: {{ schema -}}.post +- name: {{ schema_name }}.post_api signature: - [NETWORK, STRING, The network 'devnet' or 'mainnet'] - [PATH, STRING, The API path starting with '/'] @@ -36,20 +55,39 @@ options: | COMMENT = $$Used to issue an HTTP POST request to Helius.$$ sql: | - SELECT live.udf_api( - 'POST', - CASE - WHEN NETWORK = 'devnet' THEN - concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}') - ELSE - concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}') - END, - {}, - BODY, - 
'_FSC_SYS/HELIUS' - ) as response + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'POST', + CASE + WHEN NETWORK = 'devnet' THEN + concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}') + ELSE + concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}') + END, + {'fsc-quantum-execution-mode': 'async'}, + BODY, + '_FSC_SYS/HELIUS', + TRUE + ) + {%- else -%} + live.udf_api( + 'POST', + CASE + WHEN NETWORK = 'devnet' THEN + concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}') + ELSE + concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}') + END, + {}, + BODY, + '_FSC_SYS/HELIUS' + ) + {%- endif %} + as response -- name: {{ schema -}}.rpc +- name: {{ schema_name }}.rpc signature: - [NETWORK, STRING, The network 'devnet' or 'mainnet'] - [METHOD, STRING, The RPC method to call] @@ -59,17 +97,36 @@ options: | COMMENT = $$Used to issue an RPC call to Helius.$$ sql: | - SELECT live.udf_api( - 'POST', - CASE - WHEN NETWORK = 'devnet' THEN - 'https://devnet.helius-rpc.com?api-key={API_KEY}' - ELSE - 'https://mainnet.helius-rpc.com?api-key={API_KEY}' - END, - {}, - {'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS}, - '_FSC_SYS/HELIUS' - ) as response + SELECT + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'POST', + CASE + WHEN NETWORK = 'devnet' THEN + 'https://devnet.helius-rpc.com?api-key={API_KEY}' + ELSE + 'https://mainnet.helius-rpc.com?api-key={API_KEY}' + END, + {'fsc-quantum-execution-mode': 'async'}, + {'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS}, + '_FSC_SYS/HELIUS', + TRUE + ) + {%- else -%} + live.udf_api( + 'POST', + CASE + WHEN NETWORK = 'devnet' THEN + 'https://devnet.helius-rpc.com?api-key={API_KEY}' + ELSE + 'https://mainnet.helius-rpc.com?api-key={API_KEY}' + END, + {}, + {'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS}, + '_FSC_SYS/HELIUS' + ) + {%- endif %} + as response {% endmacro %} diff --git a/macros/marketplace/nftscan/README.md b/macros/marketplace/nftscan/README.md new file mode 100644 index 0000000..4a1dac5 --- /dev/null +++ b/macros/marketplace/nftscan/README.md @@ -0,0 +1,36 @@ +# NFTScan API Integration + +NFTScan is a professional NFT data infrastructure platform providing comprehensive NFT APIs for accessing NFT metadata, transactions, and market data across multiple blockchains. + +## Setup + +1. Get your NFTScan API key from [NFTScan Developer Portal](https://developer.nftscan.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/NFTSCAN` + +3. Deploy the NFTScan marketplace functions: + ```bash + dbt run --models nftscan__ nftscan_utils__nftscan_utils + ``` + +## Functions + +### `nftscan.get(path, query_args)` +Make GET requests to NFTScan API endpoints. 
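+Each wrapper returns the Livequery response object rather than the raw JSON, so results are usually unpacked with Snowflake's variant path syntax. A minimal sketch, assuming the standard `status_code`/`data`/`error` envelope (the collection address is elided, as in the examples below):
+
+```sql
+-- Inspect the response envelope before trusting the payload
+WITH resp AS (
+    SELECT nftscan.get('/api/v2/statistics/collection/eth/0x...', {}) AS response
+)
+SELECT
+    response:status_code::NUMBER AS status_code, -- HTTP status of the upstream call
+    response:error::STRING       AS error,       -- populated when the call fails
+    response:data                AS payload      -- parsed JSON body on success
+FROM resp;
+```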
+ +## Examples + +```sql +-- Get NFT collection statistics +SELECT nftscan.get('/api/v2/statistics/collection/eth/0x...', {}); + +-- Get NFTs owned by an address +SELECT nftscan.get('/api/v2/account/own/eth/0x...', {'show_attribute': 'true', 'limit': 100}); + +-- Get NFT transaction history +SELECT nftscan.get('/api/v2/transactions/account/eth/0x...', {'event_type': 'Sale', 'limit': 50}); +``` + +## API Documentation + +- [NFTScan API Documentation](https://developer.nftscan.com/) \ No newline at end of file diff --git a/macros/marketplace/opensea/README.md b/macros/marketplace/opensea/README.md new file mode 100644 index 0000000..ddb1105 --- /dev/null +++ b/macros/marketplace/opensea/README.md @@ -0,0 +1,39 @@ +# OpenSea API Integration + +OpenSea is the world's largest NFT marketplace, providing APIs for accessing NFT collections, listings, sales data, and marketplace activities. + +## Setup + +1. Get your OpenSea API key from [OpenSea Developer Portal](https://docs.opensea.io/reference/api-keys) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/OPENSEA` + +3. Deploy the OpenSea marketplace functions: + ```bash + dbt run --models opensea__ opensea_utils__opensea_utils + ``` + +## Functions + +### `opensea.get(path, query_args)` +Make GET requests to OpenSea API endpoints. + +### `opensea.post(path, body)` +Make POST requests to OpenSea API endpoints. + +## Examples + +```sql +-- Get NFT collection stats +SELECT opensea.get('/api/v2/collections/boredapeyachtclub/stats', {}); + +-- Get NFT listings +SELECT opensea.get('/api/v2/orders/ethereum/seaport/listings', {'limit': 20}); + +-- Get collection events +SELECT opensea.get('/api/v2/events/collection/boredapeyachtclub', {'event_type': 'sale'}); +``` + +## API Documentation + +- [OpenSea API Documentation](https://docs.opensea.io/reference/api-overview) \ No newline at end of file diff --git a/macros/marketplace/playgrounds/README.md b/macros/marketplace/playgrounds/README.md new file mode 100644 index 0000000..18a12af --- /dev/null +++ b/macros/marketplace/playgrounds/README.md @@ -0,0 +1,39 @@ +# Playgrounds API Integration + +Playgrounds provides gaming and entertainment data APIs with access to game statistics, player data, and gaming platform analytics. + +## Setup + +1. Get your Playgrounds API key from [Playgrounds Developer Portal](https://playgrounds.com/developers) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/PLAYGROUNDS` + +3. Deploy the Playgrounds marketplace functions: + ```bash + dbt run --models playgrounds__ playgrounds_utils__playgrounds_utils + ``` + +## Functions + +### `playgrounds.get(path, query_args)` +Make GET requests to Playgrounds API endpoints. + +### `playgrounds.post(path, body)` +Make POST requests to Playgrounds API endpoints. 
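+When an endpoint returns an array, it can be unnested directly in SQL. A sketch using `LATERAL FLATTEN`; the `data:games` path and its fields are hypothetical, so adjust them to the actual response shape:
+
+```sql
+-- One row per element of an array in the response body
+SELECT
+    game.value:name::STRING    AS game_name,   -- hypothetical field
+    game.value:players::NUMBER AS players      -- hypothetical field
+FROM (
+    SELECT playgrounds.get('/api/v1/games/stats', {'game_id': 'fortnite'}) AS response
+) r,
+LATERAL FLATTEN(input => r.response:data:games) game;
+```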
+ +## Examples + +```sql +-- Get game statistics +SELECT playgrounds.get('/api/v1/games/stats', {'game_id': 'fortnite'}); + +-- Get player rankings +SELECT playgrounds.get('/api/v1/leaderboards', {'game': 'valorant', 'region': 'na'}); + +-- Get tournament data +SELECT playgrounds.get('/api/v1/tournaments', {'status': 'active', 'limit': 50}); +``` + +## API Documentation + +- [Playgrounds API Documentation](https://docs.playgrounds.com/) \ No newline at end of file diff --git a/macros/marketplace/quicknode/README.md b/macros/marketplace/quicknode/README.md new file mode 100644 index 0000000..5ff4334 --- /dev/null +++ b/macros/marketplace/quicknode/README.md @@ -0,0 +1,44 @@ +# QuickNode API Integration + +QuickNode provides high-performance blockchain infrastructure with RPC endpoints and enhanced APIs for Ethereum, Polygon, Solana, and other networks. + +## Setup + +1. Get your QuickNode endpoint and API key from [QuickNode Dashboard](https://dashboard.quicknode.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/QUICKNODE` + +3. Deploy the QuickNode marketplace functions: + ```bash + dbt run --models quicknode__ quicknode_utils__quicknode_utils + ``` + +## Functions + +### `quicknode.get(path, query_args)` +Make GET requests to QuickNode API endpoints. + +### `quicknode.post(path, body)` +Make POST requests to QuickNode API endpoints. + +## Examples + +```sql +-- Get latest block number +SELECT quicknode.post('/rpc', { + 'jsonrpc': '2.0', + 'method': 'eth_blockNumber', + 'params': [], + 'id': 1 +}); + +-- Get NFT metadata +SELECT quicknode.get('/nft/v1/ethereum/nft/0x.../1', {}); + +-- Get token transfers +SELECT quicknode.get('/token/v1/ethereum/transfers', {'address': '0x...', 'limit': 100}); +``` + +## API Documentation + +- [QuickNode API Documentation](https://www.quicknode.com/docs/) \ No newline at end of file diff --git a/macros/marketplace/reservoir/README.md b/macros/marketplace/reservoir/README.md new file mode 100644 index 0000000..46042ff --- /dev/null +++ b/macros/marketplace/reservoir/README.md @@ -0,0 +1,39 @@ +# Reservoir API Integration + +Reservoir provides comprehensive NFT data infrastructure with APIs for accessing real-time NFT market data, collections, sales, and aggregated marketplace information. + +## Setup + +1. Get your Reservoir API key from [Reservoir Dashboard](https://reservoir.tools/dashboard) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/RESERVOIR` + +3. Deploy the Reservoir marketplace functions: + ```bash + dbt run --models reservoir__ reservoir_utils__reservoir_utils + ``` + +## Functions + +### `reservoir.get(path, query_args)` +Make GET requests to Reservoir API endpoints. + +### `reservoir.post(path, body)` +Make POST requests to Reservoir API endpoints. 
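+The wrapper surfaces HTTP failures in the response object instead of raising errors, so it is worth guarding reads. A sketch, assuming the standard `status_code`/`data`/`error` envelope:
+
+```sql
+-- Only read the payload when the call actually succeeded
+WITH resp AS (
+    SELECT reservoir.get('/collections/v7', {'id': '0x...', 'includeTopBid': 'true'}) AS response
+)
+SELECT
+    IFF(response:status_code = 200, response:data, NULL) AS payload,
+    response:error::STRING AS error
+FROM resp;
+```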
+ +## Examples + +```sql +-- Get collection floor prices +SELECT reservoir.get('/collections/v7', {'id': '0x...', 'includeTopBid': 'true'}); + +-- Get recent sales +SELECT reservoir.get('/sales/v6', {'collection': '0x...', 'limit': 100}); + +-- Get token details +SELECT reservoir.get('/tokens/v7', {'collection': '0x...', 'tokenId': '1234'}); +``` + +## API Documentation + +- [Reservoir API Documentation](https://docs.reservoir.tools/) \ No newline at end of file diff --git a/macros/marketplace/slack/README.md b/macros/marketplace/slack/README.md new file mode 100644 index 0000000..31812f8 --- /dev/null +++ b/macros/marketplace/slack/README.md @@ -0,0 +1,294 @@ +# Slack Integration for Livequery + +A straightforward Slack integration that uses secure vault-stored credentials. You construct the payload according to Slack's API spec, and Livequery delivers it using credentials stored in the vault. + +## Prerequisites & Setup + +### Option 1: Webhook Mode (Simpler, No Threading) + +**When to use:** Simple notifications without threading support. + +**Setup Steps:** +1. Go to [Slack Apps](https://api.slack.com/apps) and create a new app +2. Choose "From scratch" and select your workspace +3. Go to "Incoming Webhooks" and toggle "Activate Incoming Webhooks" to On +4. Click "Add New Webhook to Workspace" +5. Select the channel and click "Allow" +6. Copy the webhook URL (starts with `https://hooks.slack.com/services/...`) +7. Store the webhook URL in the vault under a secret name (e.g., 'alerts', 'notifications') +8. Use `slack_utils.post_webhook(secret_name, payload)` + +**Limitations:** +- โŒ No threading support (cannot use `slack.post_reply()`) +- โŒ Cannot send to different channels dynamically +- โœ… Simple setup, no bot permissions needed + +### Option 2: Web API Mode (Full Features + Threading) + +**When to use:** Need threading support, multiple channels, or advanced features. + +**Setup Steps:** +1. Go to [Slack Apps](https://api.slack.com/apps) and create a new app +2. Choose "From scratch" and select your workspace +3. Go to "OAuth & Permissions" in the sidebar +4. Under "Scopes" โ†’ "Bot Token Scopes", add these permissions: + - `chat:write` - Send messages + - `channels:read` - Access public channel information + - `groups:read` - Access private channel information (if needed) +5. Click "Install to Workspace" at the top +6. Click "Allow" to grant permissions +7. Copy the "Bot User OAuth Token" (starts with `xoxb-...`) +8. Store the bot token in the vault (Livequery handles this automatically) +9. **Important:** Invite the bot to your channel: + - Go to your Slack channel + - Type `/invite @YourBotName` (replace with your bot's name) + - Or go to channel settings โ†’ Integrations โ†’ Add apps โ†’ Select your bot +10. Get the channel ID: + - Right-click your channel name โ†’ "Copy Link" + - Extract the ID from URL: `https://yourworkspace.slack.com/archives/C087GJQ1ZHQ` โ†’ `C087GJQ1ZHQ` +11. 
Use `slack.post_message(channel, payload)` and `slack.post_reply()` for threading + +**Features:** +- โœ… Threading support with `slack.post_reply()` +- โœ… Send to any channel the bot is invited to +- โœ… More control and flexibility +- โŒ Requires bot setup and channel invitations + +## Quick Start + +### Basic Webhook Message +```sql +-- Send a simple message via webhook +SELECT slack_utils.post_webhook( + 'alerts', -- Secret name in vault + { + 'text': 'Hello from Livequery!', + 'username': 'Data Bot' + } +); +``` + +### Web API Message +```sql +-- Send message to a channel +SELECT slack.post_message( + 'C087GJQ1ZHQ', -- Channel ID + { + 'text': 'Pipeline completed!', + 'blocks': [ + { + 'type': 'header', + 'text': { + 'type': 'plain_text', + 'text': ':white_check_mark: Pipeline Success' + } + } + ] + } +); +``` + +### Threading Example +```sql +-- First send main message +WITH main_message AS ( + SELECT slack.post_message( + 'C087GJQ1ZHQ', + {'text': 'Pipeline failed with 3 errors. Details in thread...'} + ) as response +) +-- Then send threaded reply +SELECT slack.post_reply( + 'C087GJQ1ZHQ', + main_message.response:data:ts::STRING, -- Use timestamp from main message + {'text': 'Error 1: Database connection timeout'} +) as thread_response +FROM main_message; +``` + +## Functions Reference + +### `slack_utils.post_webhook(secret_name, payload)` +Send messages via Slack Incoming Webhooks using vault-stored webhook URL. + +**Parameters:** +- `secret_name` - Name of webhook secret stored in vault (e.g., 'alerts', 'notifications') +- `payload` - JSON object following [Slack webhook format](https://api.slack.com/messaging/webhooks) + +**Example:** +```sql +SELECT slack_utils.post_webhook( + 'notifications', + { + 'text': 'dbt run completed successfully', + 'username': 'dbt Bot', + 'icon_emoji': ':white_check_mark:' + } +); +``` + +### `slack.post_message(channel, payload)` +Send messages via Slack Web API (chat.postMessage) using vault-stored bot token. + +**Parameters:** +- `channel` - Channel ID (C...) or name (#channel) +- `payload` - JSON object following [Slack chat.postMessage format](https://api.slack.com/methods/chat.postMessage) + +**Example:** +```sql +SELECT slack.post_message( + 'C087GJQ1ZHQ', + { + 'text': 'Model update complete', + 'attachments': [ + { + 'color': 'good', + 'title': 'Success', + 'fields': [ + {'title': 'Models', 'value': '15', 'short': true}, + {'title': 'Duration', 'value': '2m 30s', 'short': true} + ] + } + ] + } +); +``` + +### `slack.post_reply(channel, thread_ts, payload)` +Send threaded replies via Slack Web API using vault-stored bot token. + +**Parameters:** +- `channel` - Channel ID or name +- `thread_ts` - Parent message timestamp for threading +- `payload` - JSON object following Slack chat.postMessage format + +**Example:** +```sql +SELECT slack.post_reply( + 'C087GJQ1ZHQ', + '1698765432.123456', -- Parent message timestamp + {'text': 'Additional details in this thread'} +); +``` + +### `slack.webhook_send(secret_name, payload)` +Alias for `slack_utils.post_webhook()` - sends webhook messages using vault-stored URL. 
+ +**Parameters:** +- `secret_name` - Name of webhook secret stored in vault +- `payload` - JSON object following Slack webhook format + +### Validation Functions +- `slack_utils.validate_webhook_url(url)` - Check if webhook URL format is valid +- `slack_utils.validate_bot_token(token)` - Check if bot token format is valid +- `slack_utils.validate_channel(channel)` - Check if channel format is valid + +## Vault Configuration + +### Webhook Secrets +Store your webhook URLs in the vault with meaningful names: +- `alerts` - For critical alerts +- `notifications` - For general notifications +- `marketing` - For marketing team updates + +### Bot Token +The bot token is automatically managed by Livequery and stored securely in the vault. You don't need to provide it in function calls. + +## Testing Without Spamming Slack + +### Built-in Tests +The integration includes comprehensive tests that verify functionality without hitting real Slack channels. + +### Manual Testing Options + +#### 1. Test with httpbin.org (Recommended for Development) +```sql +-- Test webhook functionality without hitting Slack +-- (Note: This bypasses vault and uses direct URL for testing) +SELECT slack_utils.post_webhook( + 'test-httpbin', -- Create test secret pointing to httpbin.org + {'text': 'Test message', 'username': 'Test Bot'} +); +``` + +#### 2. Test Workspace (Real Slack Testing) +Create a dedicated test workspace or use a private test channel: +- Store test webhook URLs in vault with names like `test-alerts` +- Use test channel IDs for `post_message()` calls +- Set up separate vault secrets for testing vs production + +## Advanced Usage + +### Rich Message Formatting +```sql +-- Advanced Block Kit message +SELECT slack.post_message( + 'C087GJQ1ZHQ', + { + 'text': 'Data Pipeline Report', + 'blocks': [ + { + 'type': 'header', + 'text': { + 'type': 'plain_text', + 'text': '๐Ÿ“Š Daily Data Pipeline Report' + } + }, + { + 'type': 'section', + 'fields': [ + {'type': 'mrkdwn', 'text': '*Environment:*\nProduction'}, + {'type': 'mrkdwn', 'text': '*Models Run:*\n25'}, + {'type': 'mrkdwn', 'text': '*Duration:*\n12m 34s'}, + {'type': 'mrkdwn', 'text': '*Status:*\nโœ… Success'} + ] + }, + { + 'type': 'actions', + 'elements': [ + { + 'type': 'button', + 'text': {'type': 'plain_text', 'text': 'View Logs'}, + 'url': 'https://your-logs-url.com' + } + ] + } + ] + } +); +``` + +### Error Handling +```sql +-- Check response for errors +WITH slack_result AS ( + SELECT slack_utils.post_webhook( + 'alerts', + {'text': 'Test message'} + ) as response +) +SELECT + response:ok::BOOLEAN as success, + response:error::STRING as error_message, + CASE + WHEN response:ok::BOOLEAN THEN 'Message sent successfully' + ELSE 'Failed: ' || response:error::STRING + END as status +FROM slack_result; +``` + +## How It Works + +1. **Secure credential storage** - Webhook URLs and bot tokens are stored in Livequery's vault +2. **You construct the payload** - Use Slack's official API documentation to build your JSON +3. **Livequery delivers it** - We handle authentication and HTTP requests to Slack +4. 
**Get the response** - Standard Slack API response with success/error info + +## Slack API Documentation + +- [Webhook Format](https://api.slack.com/messaging/webhooks) - For webhook messages +- [chat.postMessage](https://api.slack.com/methods/chat.postMessage) - For Web API messages +- [Block Kit](https://api.slack.com/block-kit) - For rich interactive messages +- [Message Formatting](https://api.slack.com/reference/surfaces/formatting) - Text formatting guide + +That's it! Secure, simple Slack integration with vault-managed credentials. \ No newline at end of file diff --git a/macros/marketplace/slack/messaging_udfs.yaml.sql b/macros/marketplace/slack/messaging_udfs.yaml.sql new file mode 100644 index 0000000..fbd9ae1 --- /dev/null +++ b/macros/marketplace/slack/messaging_udfs.yaml.sql @@ -0,0 +1,90 @@ +{% macro config_slack_messaging_udfs(schema_name = "slack", utils_schema_name = "slack_utils") -%} +{# + This macro is used to generate API calls to Slack API endpoints +#} + +{# Slack Webhook Messages #} +- name: {{ schema_name }}.webhook_send + signature: + - [WEBHOOK_SECRET_NAME, STRING, "Name of webhook secret in vault (e.g., 'alerts', 'notifications')"] + - [PAYLOAD, OBJECT, Complete Slack message payload according to Slack API spec] + return_type: + - "OBJECT" + options: | + COMMENT = 'Send a message to Slack via webhook [API docs: Webhooks](https://api.slack.com/messaging/webhooks)' + sql: | + SELECT slack_utils.post_webhook( + WEBHOOK_SECRET_NAME, + PAYLOAD + ) as response + +{# Slack Web API Messages #} +- name: {{ schema_name }}.post_message + signature: + - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"] + - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec] + - [BOT_SECRET_NAME, STRING, "Name of bot token secret in vault (optional, default: 'intelligence')"] + return_type: + - "OBJECT" + options: | + COMMENT = 'Send a message to Slack via Web API with custom bot token [API docs: chat.postMessage](https://api.slack.com/methods/chat.postMessage)' + sql: | + SELECT slack_utils.post_message( + CHANNEL, + PAYLOAD, + COALESCE(BOT_SECRET_NAME, 'intelligence') + ) as response + +- name: {{ schema_name }}.post_message + signature: + - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"] + - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec] + return_type: + - "OBJECT" + options: | + COMMENT = 'Send a message to Slack via Web API [API docs: chat.postMessage](https://api.slack.com/methods/chat.postMessage)' + sql: | + SELECT {{ schema_name }}.post_message( + CHANNEL, + PAYLOAD, + 'intelligence' + ) as response + +- name: {{ schema_name }}.post_reply + signature: + - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"] + - [THREAD_TS, STRING, Parent message timestamp for threading] + - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec] + - [BOT_SECRET_NAME, STRING, "Name of bot token secret in vault (optional, default: 'intelligence')"] + return_type: + - "OBJECT" + options: | + COMMENT = 'Send a threaded reply to Slack via Web API with custom bot token [API docs: chat.postMessage](https://api.slack.com/methods/chat.postMessage)' + sql: | + SELECT slack_utils.post_reply( + CHANNEL, + THREAD_TS, + PAYLOAD, + COALESCE(BOT_SECRET_NAME, 'intelligence') + ) as response + + +- name: {{ schema_name }}.post_reply + signature: + - [CHANNEL, STRING, "Slack channel ID (e.g. 
'C1234567890')"] + - [THREAD_TS, STRING, Parent message timestamp for threading] + - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec] + return_type: + - "OBJECT" + options: | + COMMENT = 'Send a threaded reply to Slack via Web API [API docs: chat.postMessage](https://api.slack.com/methods/chat.postMessage)' + sql: | + SELECT {{ schema_name }}.post_reply( + CHANNEL, + THREAD_TS, + PAYLOAD, + 'intelligence' + ) as response + + +{% endmacro %} diff --git a/macros/marketplace/slack/utils_udfs.yaml.sql b/macros/marketplace/slack/utils_udfs.yaml.sql new file mode 100644 index 0000000..df1dabf --- /dev/null +++ b/macros/marketplace/slack/utils_udfs.yaml.sql @@ -0,0 +1,220 @@ +{% macro config_slack_utils_udfs(schema_name = "slack_utils", utils_schema_name = "slack_utils") -%} +{# + This macro is used to generate API calls to Slack API endpoints + #} +- name: {{ schema_name }}.post_webhook + signature: + - [WEBHOOK_SECRET_NAME, STRING, "Name of webhook secret in vault (e.g., 'alerts', 'notifications')"] + - [PAYLOAD, OBJECT, Complete Slack message payload according to Slack API spec] + return_type: + - "OBJECT" + options: | + COMMENT = $$Send a message to Slack via webhook. User provides secret name for webhook URL stored in vault.$$ + sql: | + SELECT CASE + WHEN WEBHOOK_SECRET_NAME IS NULL OR WEBHOOK_SECRET_NAME = '' THEN + OBJECT_CONSTRUCT('ok', false, 'error', 'webhook_secret_name is required') + WHEN PAYLOAD IS NULL THEN + OBJECT_CONSTRUCT('ok', false, 'error', 'payload is required') + ELSE + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'POST', + '{WEBHOOK_URL}', + OBJECT_CONSTRUCT('Content-Type', 'application/json'), + PAYLOAD, + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/SLACK/' || WEBHOOK_SECRET_NAME, + 'Vault/prod/data_platform/slack/' || WEBHOOK_SECRET_NAME), + TRUE + ) + {%- else -%} + live.udf_api( + 'POST', + '{WEBHOOK_URL}', + OBJECT_CONSTRUCT('Content-Type', 'application/json'), + PAYLOAD, + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/SLACK/' || WEBHOOK_SECRET_NAME, + 'Vault/prod/data_platform/slack/' || WEBHOOK_SECRET_NAME) + ) + {%- endif %} + END as response + +- name: {{ schema_name }}.post_message + signature: + - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"] + - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec] + - [BOT_SECRET_NAME, STRING, "Name of bot token secret in vault (optional, default: 'intelligence')"] + return_type: + - "OBJECT" + options: | + COMMENT = $$Send a message to Slack via Web API chat.postMessage with custom bot token. 
User provides complete payload according to Slack API spec.$$ + sql: | + SELECT CASE + WHEN CHANNEL IS NULL OR CHANNEL = '' THEN + OBJECT_CONSTRUCT('ok', false, 'error', 'channel is required') + WHEN PAYLOAD IS NULL THEN + OBJECT_CONSTRUCT('ok', false, 'error', 'payload is required') + ELSE + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'POST', + 'https://slack.com/api/chat.postMessage', + OBJECT_CONSTRUCT( + 'Authorization', 'Bearer {BOT_TOKEN}', + 'Content-Type', 'application/json' + ), + OBJECT_INSERT(PAYLOAD, 'channel', CHANNEL), + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/SLACK/' || COALESCE(BOT_SECRET_NAME, 'intelligence'), + 'Vault/prod/data_platform/slack/' || COALESCE(BOT_SECRET_NAME, 'intelligence')), + TRUE + ) + {%- else -%} + live.udf_api( + 'POST', + 'https://slack.com/api/chat.postMessage', + OBJECT_CONSTRUCT( + 'Authorization', 'Bearer {BOT_TOKEN}', + 'Content-Type', 'application/json' + ), + OBJECT_INSERT(PAYLOAD, 'channel', CHANNEL), + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/SLACK/' || COALESCE(BOT_SECRET_NAME, 'intelligence'), + 'Vault/prod/data_platform/slack/' || COALESCE(BOT_SECRET_NAME, 'intelligence')) + ) + {%- endif %} + END as response + +- name: {{ schema_name }}.post_message + signature: + - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"] + - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec] + return_type: + - "OBJECT" + options: | + COMMENT = $$Send a message to Slack via Web API chat.postMessage. User provides complete payload according to Slack API spec.$$ + sql: | + SELECT {{ schema_name }}.post_message( + CHANNEL, + PAYLOAD, + 'intelligence' + ) as response + +- name: {{ schema_name }}.post_reply + signature: + - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"] + - [THREAD_TS, STRING, Parent message timestamp for threading] + - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec] + - [BOT_SECRET_NAME, STRING, "Name of bot token secret in vault (optional, default: 'intelligence')"] + return_type: + - "OBJECT" + options: | + COMMENT = $$Send a threaded reply to Slack via Web API with custom bot token. 
User provides complete payload according to Slack API spec.$$ + sql: | + SELECT CASE + WHEN CHANNEL IS NULL OR CHANNEL = '' THEN + OBJECT_CONSTRUCT('ok', false, 'error', 'channel is required') + WHEN THREAD_TS IS NULL OR THREAD_TS = '' THEN + OBJECT_CONSTRUCT('ok', false, 'error', 'thread_ts is required') + WHEN PAYLOAD IS NULL THEN + OBJECT_CONSTRUCT('ok', false, 'error', 'payload is required') + ELSE + {% set v2_exists = check_udf_api_v2_exists() %} + {% if v2_exists -%} + live.udf_api_v2( + 'POST', + 'https://slack.com/api/chat.postMessage', + OBJECT_CONSTRUCT( + 'Authorization', 'Bearer {BOT_TOKEN}', + 'Content-Type', 'application/json' + ), + OBJECT_INSERT( + OBJECT_INSERT(PAYLOAD, 'channel', CHANNEL), + 'thread_ts', THREAD_TS + ), + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/SLACK/' || COALESCE(BOT_SECRET_NAME, 'intelligence'), + 'Vault/prod/data_platform/slack/' || COALESCE(BOT_SECRET_NAME, 'intelligence')), + TRUE + ) + {%- else -%} + live.udf_api( + 'POST', + 'https://slack.com/api/chat.postMessage', + OBJECT_CONSTRUCT( + 'Authorization', 'Bearer {BOT_TOKEN}', + 'Content-Type', 'application/json' + ), + OBJECT_INSERT( + OBJECT_INSERT(PAYLOAD, 'channel', CHANNEL), + 'thread_ts', THREAD_TS + ), + IFF(_utils.udf_whoami() <> CURRENT_USER(), + '_FSC_SYS/SLACK/' || COALESCE(BOT_SECRET_NAME, 'intelligence'), + 'Vault/prod/data_platform/slack/' || COALESCE(BOT_SECRET_NAME, 'intelligence')) + ) + {%- endif %} + END as response + +- name: {{ schema_name }}.post_reply + signature: + - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"] + - [THREAD_TS, STRING, Parent message timestamp for threading] + - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec] + return_type: + - "OBJECT" + options: | + COMMENT = $$Send a threaded reply to Slack via Web API. 
User provides complete payload according to Slack API spec.$$ + sql: | + SELECT {{ schema_name }}.post_reply( + CHANNEL, + THREAD_TS, + PAYLOAD, + 'intelligence' + ) as response + +- name: {{ schema_name }}.validate_webhook_url + signature: + - [WEBHOOK_URL, STRING, Webhook URL to validate] + return_type: + - "BOOLEAN" + options: | + COMMENT = $$Validate if a string is a proper Slack webhook URL format.$$ + sql: | + SELECT WEBHOOK_URL IS NOT NULL + AND STARTSWITH(WEBHOOK_URL, 'https://hooks.slack.com/services/') + AND LENGTH(WEBHOOK_URL) > 50 + +- name: {{ schema_name }}.validate_bot_token + signature: + - [BOT_TOKEN, STRING, Bot token to validate] + return_type: + - "BOOLEAN" + options: | + COMMENT = $$Validate if a string is a proper Slack bot token format.$$ + sql: | + SELECT BOT_TOKEN IS NOT NULL + AND STARTSWITH(BOT_TOKEN, 'xoxb-') + AND LENGTH(BOT_TOKEN) > 20 + +- name: {{ schema_name }}.validate_channel + signature: + - [CHANNEL, STRING, "Channel ID to validate"] + return_type: + - "BOOLEAN" + options: | + COMMENT = $$Validate if a string is a proper Slack channel ID format (API requires IDs, not names).$$ + sql: | + SELECT CHANNEL IS NOT NULL + AND LENGTH(CHANNEL) > 0 + AND ( + STARTSWITH(CHANNEL, 'C') OR -- Public channel ID + STARTSWITH(CHANNEL, 'D') OR -- Direct message ID + STARTSWITH(CHANNEL, 'G') -- Private channel/group ID + ) + +{% endmacro %} diff --git a/macros/marketplace/snapshot/README.md b/macros/marketplace/snapshot/README.md new file mode 100644 index 0000000..cbb5f9f --- /dev/null +++ b/macros/marketplace/snapshot/README.md @@ -0,0 +1,45 @@ +# Snapshot API Integration + +Snapshot is a decentralized voting platform that provides APIs for accessing DAO governance data, proposals, votes, and community participation metrics. + +## Setup + +1. Get your Snapshot API key from [Snapshot Hub](https://snapshot.org/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/SNAPSHOT` + +3. Deploy the Snapshot marketplace functions: + ```bash + dbt run --models snapshot__ snapshot_utils__snapshot_utils + ``` + +## Functions + +### `snapshot.get(path, query_args)` +Make GET requests to Snapshot API endpoints. + +### `snapshot.post(path, body)` +Make POST requests to Snapshot GraphQL API endpoints. + +## Examples + +```sql +-- Get DAO spaces +SELECT snapshot.post('/graphql', { + 'query': 'query { spaces(first: 20, orderBy: "created", orderDirection: desc) { id name } }' +}); + +-- Get proposals for a space +SELECT snapshot.post('/graphql', { + 'query': 'query { proposals(first: 10, where: {space: "uniswap"}) { id title state } }' +}); + +-- Get votes for a proposal +SELECT snapshot.post('/graphql', { + 'query': 'query { votes(first: 100, where: {proposal: "proposal_id"}) { voter choice } }' +}); +``` + +## API Documentation + +- [Snapshot API Documentation](https://docs.snapshot.org/) \ No newline at end of file diff --git a/macros/marketplace/solscan/README.md b/macros/marketplace/solscan/README.md new file mode 100644 index 0000000..2c94e28 --- /dev/null +++ b/macros/marketplace/solscan/README.md @@ -0,0 +1,36 @@ +# Solscan API Integration + +Solscan is a leading Solana blockchain explorer providing comprehensive APIs for accessing Solana transaction data, account information, and network statistics. + +## Setup + +1. Get your Solscan API key from [Solscan API Portal](https://pro-api.solscan.io/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/SOLSCAN` + +3. 
Deploy the Solscan marketplace functions: + ```bash + dbt run --models solscan__ solscan_utils__solscan_utils + ``` + +## Functions + +### `solscan.get(path, query_args)` +Make GET requests to Solscan API endpoints. + +## Examples + +```sql +-- Get account information +SELECT solscan.get('/account', {'address': 'account_address'}); + +-- Get transaction details +SELECT solscan.get('/transaction', {'signature': 'transaction_signature'}); + +-- Get token information +SELECT solscan.get('/token/meta', {'token': 'token_address'}); +``` + +## API Documentation + +- [Solscan API Documentation](https://docs.solscan.io/) \ No newline at end of file diff --git a/macros/marketplace/stakingrewards/README.md b/macros/marketplace/stakingrewards/README.md new file mode 100644 index 0000000..bc6e68f --- /dev/null +++ b/macros/marketplace/stakingrewards/README.md @@ -0,0 +1,36 @@ +# Staking Rewards API Integration + +Staking Rewards provides comprehensive data on cryptocurrency staking opportunities, validator performance, and yield farming across multiple blockchain networks. + +## Setup + +1. Get your Staking Rewards API key from [Staking Rewards API Portal](https://stakingrewards.com/api) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/STAKINGREWARDS` + +3. Deploy the Staking Rewards marketplace functions: + ```bash + dbt run --models stakingrewards__ stakingrewards_utils__stakingrewards_utils + ``` + +## Functions + +### `stakingrewards.get(path, query_args)` +Make GET requests to Staking Rewards API endpoints. + +## Examples + +```sql +-- Get staking assets +SELECT stakingrewards.get('/assets', {'limit': 100}); + +-- Get validator information +SELECT stakingrewards.get('/validators', {'asset': 'ethereum', 'limit': 50}); + +-- Get staking rewards data +SELECT stakingrewards.get('/rewards', {'asset': 'solana', 'timeframe': '30d'}); +``` + +## API Documentation + +- [Staking Rewards API Documentation](https://docs.stakingrewards.com/) \ No newline at end of file diff --git a/macros/marketplace/strangelove/README.md b/macros/marketplace/strangelove/README.md new file mode 100644 index 0000000..d1235cf --- /dev/null +++ b/macros/marketplace/strangelove/README.md @@ -0,0 +1,39 @@ +# Strangelove API Integration + +Strangelove provides blockchain infrastructure and data services for Cosmos ecosystem blockchains, offering APIs for accessing cross-chain data and IBC information. + +## Setup + +1. Get your Strangelove API key from [Strangelove Ventures](https://strangelove.ventures/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/STRANGELOVE` + +3. Deploy the Strangelove marketplace functions: + ```bash + dbt run --models strangelove__ strangelove_utils__strangelove_utils + ``` + +## Functions + +### `strangelove.get(path, query_args)` +Make GET requests to Strangelove API endpoints. + +### `strangelove.post(path, body)` +Make POST requests to Strangelove API endpoints. 
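+The `post` helper takes its body as an `OBJECT`, so requests can be assembled dynamically with `OBJECT_CONSTRUCT` instead of a JSON literal. A sketch; the route and body fields here are hypothetical:
+
+```sql
+-- Build the POST body from SQL expressions rather than a literal
+SELECT strangelove.post(
+    '/api/v1/query',               -- hypothetical route
+    OBJECT_CONSTRUCT(
+        'chain', 'cosmoshub',      -- hypothetical body fields
+        'limit', 100
+    )
+) AS response;
+```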
+ +## Examples + +```sql +-- Get Cosmos network data +SELECT strangelove.get('/api/v1/chains', {}); + +-- Get IBC transfer data +SELECT strangelove.get('/api/v1/ibc/transfers', {'chain': 'cosmoshub', 'limit': 100}); + +-- Get validator information +SELECT strangelove.get('/api/v1/validators', {'chain': 'osmosis'}); +``` + +## API Documentation + +- [Strangelove API Documentation](https://docs.strangelove.ventures/) \ No newline at end of file diff --git a/macros/marketplace/subquery/README.md b/macros/marketplace/subquery/README.md new file mode 100644 index 0000000..72c1667 --- /dev/null +++ b/macros/marketplace/subquery/README.md @@ -0,0 +1,45 @@ +# SubQuery API Integration + +SubQuery provides decentralized data indexing infrastructure for Web3, offering APIs to access indexed blockchain data across multiple networks including Polkadot, Ethereum, and Cosmos. + +## Setup + +1. Get your SubQuery API key from [SubQuery Managed Service](https://managedservice.subquery.network/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/SUBQUERY` + +3. Deploy the SubQuery marketplace functions: + ```bash + dbt run --models subquery__ subquery_utils__subquery_utils + ``` + +## Functions + +### `subquery.get(path, query_args)` +Make GET requests to SubQuery API endpoints. + +### `subquery.post(path, body)` +Make POST requests to SubQuery GraphQL API endpoints. + +## Examples + +```sql +-- Get indexed project data +SELECT subquery.post('/graphql', { + 'query': 'query { transfers(first: 10) { id from to value } }' +}); + +-- Get block information +SELECT subquery.post('/graphql', { + 'query': 'query { blocks(first: 5, orderBy: NUMBER_DESC) { id number timestamp } }' +}); + +-- Get account transactions +SELECT subquery.post('/graphql', { + 'query': 'query { accounts(filter: {id: {equalTo: "address"}}) { id transactions { nodes { id } } } }' +}); +``` + +## API Documentation + +- [SubQuery API Documentation](https://academy.subquery.network/) \ No newline at end of file diff --git a/macros/marketplace/topshot/README.md b/macros/marketplace/topshot/README.md new file mode 100644 index 0000000..0fbaffe --- /dev/null +++ b/macros/marketplace/topshot/README.md @@ -0,0 +1,36 @@ +# NBA Top Shot API Integration + +NBA Top Shot is Dapper Labs' basketball NFT platform featuring officially licensed NBA highlights as digital collectible Moments. + +## Setup + +1. Get your NBA Top Shot API key from [Dapper Labs Developer Portal](https://developers.dapperlabs.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/TOPSHOT` + +3. Deploy the Top Shot marketplace functions: + ```bash + dbt run --models topshot__ topshot_utils__topshot_utils + ``` + +## Functions + +### `topshot.get(path, query_args)` +Make GET requests to NBA Top Shot API endpoints. 
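+Every call is a live HTTP request, so when several fields are needed from one resource it helps to call once and project from a CTE, as in this sketch (the field paths under `data` are hypothetical):
+
+```sql
+-- One API call, several extracted columns
+WITH moment AS (
+    SELECT topshot.get('/moments/12345', {}) AS response
+)
+SELECT
+    response:data:player::STRING AS player,        -- hypothetical paths
+    response:data:play::STRING   AS play_category,
+    response:data:serial::NUMBER AS serial_number
+FROM moment;
+```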
+ +## Examples + +```sql +-- Get Top Shot collections +SELECT topshot.get('/collections', {}); + +-- Get moment details +SELECT topshot.get('/moments/12345', {}); + +-- Get marketplace listings +SELECT topshot.get('/marketplace/listings', {'player': 'lebron-james', 'limit': 50}); +``` + +## API Documentation + +- [NBA Top Shot API Documentation](https://developers.dapperlabs.com/) \ No newline at end of file diff --git a/macros/marketplace/transpose/README.md b/macros/marketplace/transpose/README.md new file mode 100644 index 0000000..fad4302 --- /dev/null +++ b/macros/marketplace/transpose/README.md @@ -0,0 +1,39 @@ +# Transpose API Integration + +Transpose provides real-time blockchain data infrastructure with APIs for accessing NFT data, DeFi protocols, and on-chain analytics across multiple networks. + +## Setup + +1. Get your Transpose API key from [Transpose Dashboard](https://dashboard.transpose.io/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/TRANSPOSE` + +3. Deploy the Transpose marketplace functions: + ```bash + dbt run --models transpose__ transpose_utils__transpose_utils + ``` + +## Functions + +### `transpose.get(path, query_args)` +Make GET requests to Transpose API endpoints. + +### `transpose.post(path, body)` +Make POST requests to Transpose API endpoints. + +## Examples + +```sql +-- Get NFT collection data +SELECT transpose.get('/v0/ethereum/collections/0x...', {}); + +-- Get account NFTs +SELECT transpose.get('/v0/ethereum/nfts/by-owner', {'owner_address': '0x...', 'limit': 100}); + +-- Get token transfers +SELECT transpose.get('/v0/ethereum/transfers', {'contract_address': '0x...', 'limit': 50}); +``` + +## API Documentation + +- [Transpose API Documentation](https://docs.transpose.io/) \ No newline at end of file diff --git a/macros/marketplace/zapper/README.md b/macros/marketplace/zapper/README.md new file mode 100644 index 0000000..ed2822c --- /dev/null +++ b/macros/marketplace/zapper/README.md @@ -0,0 +1,36 @@ +# Zapper API Integration + +Zapper provides DeFi portfolio tracking and analytics with APIs for accessing wallet balances, DeFi positions, transaction history, and yield farming opportunities. + +## Setup + +1. Get your Zapper API key from [Zapper API Portal](https://api.zapper.fi/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/ZAPPER` + +3. Deploy the Zapper marketplace functions: + ```bash + dbt run --models zapper__ zapper_utils__zapper_utils + ``` + +## Functions + +### `zapper.get(path, query_args)` +Make GET requests to Zapper API endpoints. + +## Examples + +```sql +-- Get wallet token balances +SELECT zapper.get('/v2/balances', {'addresses[]': '0x...', 'networks[]': 'ethereum'}); + +-- Get DeFi protocol positions +SELECT zapper.get('/v2/apps/tokens', {'groupId': 'uniswap-v2', 'addresses[]': '0x...'}); + +-- Get transaction history +SELECT zapper.get('/v2/transactions', {'address': '0x...', 'network': 'ethereum'}); +``` + +## API Documentation + +- [Zapper API Documentation](https://docs.zapper.fi/) \ No newline at end of file diff --git a/macros/marketplace/zettablock/README.md b/macros/marketplace/zettablock/README.md new file mode 100644 index 0000000..c4b75af --- /dev/null +++ b/macros/marketplace/zettablock/README.md @@ -0,0 +1,45 @@ +# ZettaBlock API Integration + +ZettaBlock provides real-time blockchain data infrastructure with GraphQL APIs for accessing multi-chain data, analytics, and custom data indexing. + +## Setup + +1. 
Get your ZettaBlock API key from [ZettaBlock Console](https://console.zettablock.com/) + +2. Store the API key in Snowflake secrets under `_FSC_SYS/ZETTABLOCK` + +3. Deploy the ZettaBlock marketplace functions: + ```bash + dbt run --models zettablock__ zettablock_utils__zettablock_utils + ``` + +## Functions + +### `zettablock.get(path, query_args)` +Make GET requests to ZettaBlock API endpoints. + +### `zettablock.post(path, body)` +Make POST requests to ZettaBlock GraphQL API endpoints. + +## Examples + +```sql +-- Get blockchain data via GraphQL +SELECT zettablock.post('/graphql', { + 'query': 'query { ethereum { transactions(first: 10) { hash value gasPrice } } }' +}); + +-- Get token information +SELECT zettablock.post('/graphql', { + 'query': 'query { tokens(network: "ethereum", first: 20) { address symbol name } }' +}); + +-- Get DeFi protocol data +SELECT zettablock.post('/graphql', { + 'query': 'query { defi { protocols(first: 10) { name tvl volume24h } } }' +}); +``` + +## API Documentation + +- [ZettaBlock API Documentation](https://docs.zettablock.com/) \ No newline at end of file diff --git a/macros/tests/udfs.sql b/macros/tests/udfs.sql index 63d6774..fc30209 100644 --- a/macros/tests/udfs.sql +++ b/macros/tests/udfs.sql @@ -33,3 +33,33 @@ test AS {%- endif -%} {%- endfor -%} {%- endmacro -%} + +{% macro base_test_udf_without_context(model, udf, args, assertions) %} +{# + Generates a test for a UDF without setting LIVEQUERY_CONTEXT. + #} +{%- set call -%} +{{ target.database }}.{{ udf }}({{ args }}) +{%- endset -%} +, +test AS +( + SELECT + '{{ udf }}' AS test_name + ,[{{ args }}] as parameters + ,{{ call }} AS result +) + {% for assertion in assertions %} + SELECT + test_name, + parameters, + result, + $${{ assertion }}$$ AS assertion, + $$SELECT {{ call ~ "\n" }};$$ AS sql + FROM test + WHERE NOT {{ assertion }} + {%- if not loop.last %} + UNION ALL + {%- endif -%} + {%- endfor -%} +{%- endmacro -%} diff --git a/macros/tests/udtfs.sql b/macros/tests/udtfs.sql new file mode 100644 index 0000000..6a56793 --- /dev/null +++ b/macros/tests/udtfs.sql @@ -0,0 +1,31 @@ +{% macro base_test_udtf(model, udf, args, assertions) %} +{# + Generates a test for a User-Defined Table Function (UDTF). + Unlike scalar UDFs, UDTFs return a table of results. 
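+
+    Typical schema.yml usage, mirroring the UDTF tests elsewhere in this repo
+    (model and argument names are illustrative):
+
+    tests:
+      - test_udtf:
+          name: test_my_schema__tf_example
+          args: >
+            'FlipsideCrypto',
+            'admin-models'
+          assertions:
+            - row_count >= 0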
+    #}
+{%- set call -%}
+SELECT * FROM TABLE({{ udf }}({{ args }}))
+{%- endset -%}
+
+WITH test AS
+(
+    SELECT
+        '{{ udf }}' AS test_name
+        ,[{{ args }}] as parameters
+        ,t.*
+    FROM TABLE({{ udf }}({{ args }})) t
+)
+
+{% for assertion in assertions %}
+SELECT
+    test_name,
+    parameters,
+    $${{ assertion }}$$ AS assertion,
+    $${{ call }}$$ AS sql
+FROM test
+WHERE NOT {{ assertion }}
+{%- if not loop.last %}
+UNION ALL
+{%- endif -%}
+{%- endfor -%}
+{% endmacro %}
diff --git a/models/deploy/marketplace/claude/claude__.sql b/models/deploy/marketplace/claude/claude__.sql
index fb082a6..048f57e 100644
--- a/models/deploy/marketplace/claude/claude__.sql
+++ b/models/deploy/marketplace/claude/claude__.sql
@@ -1,4 +1,5 @@
 -- depends_on: {{ ref('live') }}
+-- depends_on: {{ ref('claude_utils__claude_utils') }}
 {%- set configs = [
     config_claude_messages_udfs,
     config_claude_models_udfs,
diff --git a/models/deploy/marketplace/claude/claude_utils__claude_utils.yml b/models/deploy/marketplace/claude/claude_utils__claude_utils.yml
index 9449ebf..bb7ddc7 100644
--- a/models/deploy/marketplace/claude/claude_utils__claude_utils.yml
+++ b/models/deploy/marketplace/claude/claude_utils__claude_utils.yml
@@ -2,10 +2,10 @@ version: 2
 models:
   - name: claude_utils__claude_utils
     columns:
-      - name: post
+      - name: post_api
         tests:
           - test_udf:
-              name: test_claude_utils__post_status_200
+              name: test_claude_utils__post_api_status_200
               args: >
                 '/v1/messages'
                 , {
diff --git a/models/deploy/marketplace/github/github_actions__github_utils.yml b/models/deploy/marketplace/github/github_actions__github_utils.yml
index 0053686..e82d108 100644
--- a/models/deploy/marketplace/github/github_actions__github_utils.yml
+++ b/models/deploy/marketplace/github/github_actions__github_utils.yml
@@ -4,34 +4,270 @@ models:
     columns:
       - name: workflows
         tests:
-          - test_udf:
-              name: test_github_actions__workflows_status_200
+          - test_udf_without_context:
+              name: test_github_actions__workflows_with_query
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                {'per_page': 5}
+              assertions:
+                - result:workflows IS NOT NULL
+                - result:total_count IS NOT NULL
+          - test_udf_without_context:
+              name: test_github_actions__workflows_simple
               args: >
                 'FlipsideCrypto',
                 'admin-models'
               assertions:
-                - result:status_code = 200
-                - result:error IS NULL
+                - result:workflows IS NOT NULL
+                - result:total_count IS NOT NULL
+
       - name: runs
         tests:
-          - test_udf:
-              name: test_github_actions__runs_status_200
+          - test_udf_without_context:
+              name: test_github_actions__runs_with_query
               args: >
                 'FlipsideCrypto',
                 'admin-models',
-                {}
+                {'per_page': 10, 'status': 'completed'}
               assertions:
-                - result:status_code = 200
-                - result:error IS NULL
+                - result:workflow_runs IS NOT NULL
+                - result:total_count IS NOT NULL
+          - test_udf_without_context:
+              name: test_github_actions__runs_simple
+              args: >
+                'FlipsideCrypto',
+                'admin-models'
+              assertions:
+                - result:workflow_runs IS NOT NULL
+                - result:total_count IS NOT NULL
+
       - name: workflow_runs
         tests:
-          - test_udf:
-              name: test_github_actions__workflow_runs_status_200
+          - test_udf_without_context:
+              name: test_github_actions__workflow_runs_with_query
               args: >
                 'FlipsideCrypto',
                 'admin-models',
                 'dbt_run_dev_refresh.yml',
-                {}
+                {'per_page': 5}
               assertions:
-                - result:status_code = 200
-                - result:error IS NULL
+                - result:workflow_runs IS NOT NULL
+                - result:total_count IS NOT NULL
+          - test_udf_without_context:
+              name: test_github_actions__workflow_runs_simple
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                'dbt_run_dev_refresh.yml'
+              assertions:
+                - result:workflow_runs IS NOT NULL
+                - result:total_count IS NOT NULL
+
+      - name: workflow_dispatches
+        tests:
+          - test_udf_without_context:
+              name: test_github_actions__workflow_dispatches_with_body
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                'test-workflow.yml',
+                {'ref': 'main', 'inputs': {'debug': 'true'}}
+              assertions:
+                - result IS NOT NULL
+          - test_udf_without_context:
+              name: test_github_actions__workflow_dispatches_simple
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                'test-workflow.yml'
+              assertions:
+                - result IS NOT NULL
+
+      - name: workflow_enable
+        tests:
+          - test_udf_without_context:
+              name: test_github_actions__workflow_enable
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                'test-workflow.yml'
+              assertions:
+                - result IS NOT NULL
+
+      - name: workflow_disable
+        tests:
+          - test_udf_without_context:
+              name: test_github_actions__workflow_disable
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                'test-workflow.yml'
+              assertions:
+                - result IS NOT NULL
+
+      - name: workflow_run_logs
+        tests:
+          - test_udf_without_context:
+              name: test_github_actions__workflow_run_logs
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                '12345678'
+              assertions:
+                - result IS NULL
+
+      - name: job_logs
+        tests:
+          - test_udf_without_context:
+              name: test_github_actions__job_logs
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                '87654321'
+              assertions:
+                - result IS NULL
+
+      - name: workflow_run_jobs
+        tests:
+          - test_udf_without_context:
+              name: test_github_actions__workflow_run_jobs_with_query
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                '12345678',
+                {'filter': 'latest'}
+              assertions:
+                - result:jobs IS NULL
+                - result:total_count IS NULL
+          - test_udf_without_context:
+              name: test_github_actions__workflow_run_jobs_simple
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                '12345678'
+              assertions:
+                - result:jobs IS NULL
+                - result:total_count IS NULL
+
+      # Table Function Tests
+      - name: tf_workflows
+        tests:
+          - test_udtf:
+              name: test_github_actions__tf_workflows_with_query
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                {'per_page': 3}
+              assertions:
+                - row_count >= 0
+          - test_udtf:
+              name: test_github_actions__tf_workflows_simple
+              args: >
+                'FlipsideCrypto',
+                'admin-models'
+              assertions:
+                - row_count >= 0
+
+      - name: tf_runs
+        tests:
+          - test_udtf:
+              name: test_github_actions__tf_runs_with_query
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                {'per_page': 5, 'status': 'completed'}
+              assertions:
+                - row_count >= 0
+          - test_udtf:
+              name: test_github_actions__tf_runs_simple
+              args: >
+                'FlipsideCrypto',
+                'admin-models'
+              assertions:
+                - row_count >= 0
+
+      - name: tf_workflow_runs
+        tests:
+          - test_udtf:
+              name: test_github_actions__tf_workflow_runs_with_query
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                'dbt_run_dev_refresh.yml',
+                {'per_page': 3}
+              assertions:
+                - row_count >= 0
+          - test_udtf:
+              name: test_github_actions__tf_workflow_runs_simple
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                'dbt_run_dev_refresh.yml'
+              assertions:
+                - row_count >= 0
+
+      - name: tf_workflow_run_jobs
+        tests:
+          - test_udtf:
+              name: test_github_actions__tf_workflow_run_jobs_with_query
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                '12345678',
+                {'filter': 'latest'}
+              assertions:
+                - row_count >= 0
+          - test_udtf:
+              name: test_github_actions__tf_workflow_run_jobs_simple
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                '12345678'
+              assertions:
+                - row_count >= 0
+
+      - name: tf_failed_jobs_with_logs
+        tests:
+          - test_udtf:
+              name: test_github_actions__tf_failed_jobs_with_logs
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                '12345678'
+              assertions:
+                - row_count >= 0
+
+      - name: tf_failure_analysis_with_ai
+        tests:
+          - test_udtf:
+              name: test_github_actions__tf_failure_analysis_with_ai_cortex
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                '12345678',
+                'cortex',
+                'mistral-large',
+                ''
+              assertions:
+                - row_count >= 0
+          - test_udtf:
+              name: test_github_actions__tf_failure_analysis_with_ai_custom_prompt
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                '12345678',
+                'cortex',
+                'mistral-7b',
+                'Analyze these failures and provide concise recommendations:'
+              assertions:
+                - row_count >= 0
+          - test_udtf:
+              name: test_github_actions__tf_failure_analysis_with_ai_default
+              args: >
+                'FlipsideCrypto',
+                'admin-models',
+                '12345678'
+              assertions:
+                - row_count >= 0
+
diff --git a/models/deploy/marketplace/github/github_utils__github_utils.yml b/models/deploy/marketplace/github/github_utils__github_utils.yml
index 689f046..ac31039 100644
--- a/models/deploy/marketplace/github/github_utils__github_utils.yml
+++ b/models/deploy/marketplace/github/github_utils__github_utils.yml
@@ -4,8 +4,42 @@ models:
     columns:
       - name: octocat
         tests:
-          - test_udf:
+          - test_udf_without_context:
               name: test_github_utils__octocat_status_200
               assertions:
                 - result:status_code = 200
                 - result:error IS NULL
+                - result:data IS NOT NULL
+
+      - name: headers
+        tests:
+          - test_udf_without_context:
+              name: test_github_utils__headers_format
+              assertions:
+                - result IS NOT NULL
+                - LENGTH(result) > 50
+                - CONTAINS(result, 'Authorization')
+                - CONTAINS(result, 'X-GitHub-Api-Version')
+                - CONTAINS(result, 'Accept')
+
+      - name: post_api
+        tests:
+          - test_udf_without_context:
+              name: test_github_utils__post_invalid_route
+              args: >
+                'invalid/test/route',
+                {'test': 'data'}
+              assertions:
+                - result:status_code = 404
+                - result IS NOT NULL
+
+      - name: put_api
+        tests:
+          - test_udf_without_context:
+              name: test_github_utils__put_invalid_route
+              args: >
+                'invalid/test/route',
+                {'test': 'data'}
+              assertions:
+                - result:status_code = 404
+                - result IS NOT NULL
diff --git a/models/deploy/marketplace/slack/slack__.sql b/models/deploy/marketplace/slack/slack__.sql
new file mode 100644
index 0000000..34332e9
--- /dev/null
+++ b/models/deploy/marketplace/slack/slack__.sql
@@ -0,0 +1,6 @@
+-- depends_on: {{ ref('live') }}
+-- depends_on: {{ ref('slack_utils__slack_utils') }}
+{%- set configs = [
+    config_slack_messaging_udfs,
+    ] -%}
+{{- ephemeral_deploy_marketplace(configs) -}}
diff --git a/models/deploy/marketplace/slack/slack__.yml b/models/deploy/marketplace/slack/slack__.yml
new file mode 100644
index 0000000..accdb42
--- /dev/null
+++ b/models/deploy/marketplace/slack/slack__.yml
@@ -0,0 +1,124 @@
+version: 2
+models:
+  - name: slack__
+    columns:
+      - name: webhook_send
+        tests:
+          - test_udf:
+              name: test_slack__webhook_send_simple
+              args: >
+                'https://httpbin.org/post',
+                {'text': 'Hello from Livequery!'}
+              assertions:
+                - result:status_code = 200
+                - result:data.json.text = 'Hello from Livequery!'
+                - result IS NOT NULL
+          - test_udf:
+              name: test_slack__webhook_send_rich
+              args: >
+                'https://httpbin.org/post',
+                {
+                  'text': 'Pipeline completed!',
+                  'username': 'dbt Bot',
+                  'icon_emoji': ':bar_chart:',
+                  'attachments': [
+                    {
+                      'color': '#36a64f',
+                      'title': 'Success',
+                      'fields': [
+                        {'title': 'Models', 'value': '5', 'short': true},
+                        {'title': 'Failed', 'value': '0', 'short': true}
+                      ]
+                    }
+                  ]
+                }
+              assertions:
+                - result:status_code = 200
+                - result:data.json.text = 'Pipeline completed!'
+                - result:data.json.username = 'dbt Bot'
+                - result IS NOT NULL
+
+      - name: post_message
+        tests:
+          - test_udf:
+              name: test_slack__post_message_simple
+              args: >
+                'C1234567890',
+                {'text': 'Hello from Livequery!'}
+              assertions:
+                - result IS NOT NULL
+          - test_udf:
+              name: test_slack__post_message_blocks
+              args: >
+                'C1234567890',
+                {
+                  'text': 'Pipeline completed!',
+                  'blocks': [
+                    {
+                      'type': 'header',
+                      'text': {
+                        'type': 'plain_text',
+                        'text': ':white_check_mark: Pipeline Success'
+                      }
+                    },
+                    {
+                      'type': 'section',
+                      'fields': [
+                        {'type': 'mrkdwn', 'text': '*Repository:*\nFlipsideCrypto/my-repo'},
+                        {'type': 'mrkdwn', 'text': '*Duration:*\n15m 30s'}
+                      ]
+                    }
+                  ]
+                }
+              assertions:
+                - result IS NOT NULL
+
+      - name: post_reply
+        tests:
+          - test_udf:
+              name: test_slack__post_reply_simple
+              args: >
+                'C1234567890',
+                '1234567890.123456',
+                {'text': 'Thread reply from Livequery!'}
+              assertions:
+                - result IS NOT NULL
+
+      - name: webhook_send
+        tests:
+          - test_udf:
+              name: test_slack__webhook_send_complex_payload
+              args: >
+                'https://httpbin.org/post',
+                {
+                  'text': 'Complex test message',
+                  'username': 'Test Bot',
+                  'icon_emoji': ':test_tube:',
+                  'blocks': [
+                    {
+                      'type': 'header',
+                      'text': {
+                        'type': 'plain_text',
+                        'text': '🧪 Test Results'
+                      }
+                    },
+                    {
+                      'type': 'section',
+                      'text': {
+                        'type': 'mrkdwn',
+                        'text': '*All tests passed!* ✅'
+                      }
+                    }
+                  ],
+                  'attachments': [
+                    {
+                      'color': '#36a64f',
+                      'blocks': []
+                    }
+                  ]
+                }
+              assertions:
+                - result:status_code = 200
+                - result:data.json.text = 'Complex test message'
+                - result:data.json.username = 'Test Bot'
+                - result IS NOT NULL
\ No newline at end of file
diff --git a/models/deploy/marketplace/slack/slack_utils__slack_utils.sql b/models/deploy/marketplace/slack/slack_utils__slack_utils.sql
new file mode 100644
index 0000000..362bb02
--- /dev/null
+++ b/models/deploy/marketplace/slack/slack_utils__slack_utils.sql
@@ -0,0 +1,5 @@
+-- depends_on: {{ ref('live') }}
+{%- set configs = [
+    config_slack_utils_udfs,
+    ] -%}
+{{- ephemeral_deploy_marketplace(configs) -}}
\ No newline at end of file
diff --git a/models/deploy/marketplace/slack/slack_utils__slack_utils.yml b/models/deploy/marketplace/slack/slack_utils__slack_utils.yml
new file mode 100644
index 0000000..bde7f64
--- /dev/null
+++ b/models/deploy/marketplace/slack/slack_utils__slack_utils.yml
@@ -0,0 +1,125 @@
+version: 2
+models:
+  - name: slack_utils__slack_utils
+    columns:
+      - name: post_webhook
+        tests:
+          - test_udf_without_context:
+              name: test_slack_utils__post_webhook_httpbin
+              args: >
+                'https://httpbin.org/post',
+                {'text': 'Test message from Livequery'}
+              assertions:
+                - result:status_code = 200
+                - result:data.json.text = 'Test message from Livequery'
+                - result IS NOT NULL
+          - test_udf_without_context:
+              name: test_slack_utils__post_webhook_invalid_url
+              args: >
+                'https://httpbin.org/status/404',
+                {'text': 'Test message'}
+              assertions:
+                - result:status_code = 404
+                - result IS NOT NULL
+          - test_udf_without_context:
+              name: test_slack_utils__post_webhook_null_payload
+              args: >
+                'https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX',
+                NULL
+              assertions:
+                - result:ok = false
+                - result:error = 'payload is required'
+
+      - name: post_message
+        tests:
+          - test_udf_without_context:
+              name: test_slack_utils__post_message_httpbin
+              args: >
+                'C1234567890',
+                {'text': 'Test message from Livequery'}
+              assertions:
+                - result:status_code = 200
+                - result:data.json.text = 'Test message from Livequery'
+                - result IS NOT NULL
+          - test_udf_without_context:
+              name: test_slack_utils__post_message_auth_error
+              args: >
+                'C1234567890',
+                {'text': 'Test message'}
+              assertions:
+                - result:status_code = 200
+                - result IS NOT NULL
+
+      - name: post_reply
+        tests:
+          - test_udf_without_context:
+              name: test_slack_utils__post_reply_httpbin
+              args: >
+                'C1234567890',
+                '1234567890.123456',
+                {'text': 'Test reply from Livequery'}
+              assertions:
+                - result:status_code = 200
+                - result:data.json.text = 'Test reply from Livequery'
+                - result IS NOT NULL
+
+      - name: validate_webhook_url
+        tests:
+          - test_udf_without_context:
+              name: test_slack_utils__validate_webhook_url_valid
+              args: >
+                'https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX'
+              assertions:
+                - result = true
+          - test_udf_without_context:
+              name: test_slack_utils__validate_webhook_url_invalid
+              args: >
+                'https://invalid-url.com/webhook'
+              assertions:
+                - result = false
+
+
+      - name: validate_channel
+        tests:
+          - test_udf_without_context:
+              name: test_slack_utils__validate_channel_id
+              args: >
+                'C1234567890'
+              assertions:
+                - result = true
+          - test_udf_without_context:
+              name: test_slack_utils__validate_channel_name
+              args: >
+                '#general'
+              assertions:
+                - result = false
+          - test_udf_without_context:
+              name: test_slack_utils__validate_channel_dm
+              args: >
+                'D1234567890'
+              assertions:
+                - result = true
+          - test_udf_without_context:
+              name: test_slack_utils__validate_channel_group
+              args: >
+                'G1234567890'
+              assertions:
+                - result = true
+          - test_udf_without_context:
+              name: test_slack_utils__validate_channel_invalid
+              args: >
+                'invalid-channel'
+              assertions:
+                - result = false
+          - test_udf_without_context:
+              name: test_slack_utils__validate_channel_null
+              args: >
+                NULL
+              assertions:
+                - result = false
+          - test_udf_without_context:
+              name: test_slack_utils__validate_channel_empty
+              args: >
+                ''
+              assertions:
+                - result = false
\ No newline at end of file
diff --git a/selectors.yml b/selectors.yml
index 7bc34ab..4349886 100644
--- a/selectors.yml
+++ b/selectors.yml
@@ -16,3 +16,9 @@ selectors:
           - livequery_models.deploy.marketplace.apilayer.* # API Endpoints not working
           - livequery_models.deploy.marketplace.opensea.* # Requite wallet validated API Key
           - livequery_models.deploy.marketplace.credmark.* # Requires API Key
+
+  - name: test_udfs_without_context
+    definition:
+      union:
+        - method: test_name
+          value: "*test_udf_without_context*"
diff --git a/tests/generic/test_udf.sql b/tests/generic/test_udf.sql
index bad4bf8..8857b78 100644
--- a/tests/generic/test_udf.sql
+++ b/tests/generic/test_udf.sql
@@ -10,3 +10,16 @@
     {{ base_test_udf(model, udf, args, assertions) }}
 
 {% endtest %}
+
+{% test test_udf_without_context(model, column_name, args, assertions) %}
+    {#
+    This is a generic test for UDFs without setting LIVEQUERY_CONTEXT.
+    The UDFs are deployed using ephemeral models, so we need to
+    use the ephemeral model name to derive the UDF name.
+    #}
+    {%- set schema = model | replace("__dbt__cte__", "") -%}
+    {%- set schema = schema.split("__") | first -%}
+    {%- set udf = schema ~ "." ~ column_name -%}
+
+    {{ base_test_udf_without_context(model, udf, args, assertions) }}
+{% endtest %}
diff --git a/tests/generic/test_udtf.sql b/tests/generic/test_udtf.sql
new file mode 100644
index 0000000..f1b3106
--- /dev/null
+++ b/tests/generic/test_udtf.sql
@@ -0,0 +1,28 @@
+{% test test_udtf(model, column_name, args, assertions) %}
+    {%- set schema = model | replace("__dbt__cte__", "") -%}
+    {%- set schema = schema.split("__") | first -%}
+    {%- set udf = schema ~ "." ~ column_name -%}
+
+    WITH base_test_data AS
+    (
+        SELECT
+            '{{ udf }}' AS test_name
+            ,[{{ args }}] as parameters
+            ,COUNT(*) OVER () AS row_count
+        FROM TABLE({{ udf }}({{ args }})) t
+        LIMIT 1
+    )
+
+    {% for assertion in assertions %}
+    SELECT
+        test_name,
+        parameters,
+        $${{ assertion }}$$ AS assertion,
+        $$SELECT * FROM TABLE({{ udf }}({{ args }}))$$ AS sql
+    FROM base_test_data
+    WHERE NOT ({{ assertion }})
+    {% if not loop.last %}
+    UNION ALL
+    {% endif %}
+    {% endfor %}
+{% endtest %}
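
For reference, the `test_udtf` generic test above derives the UDF name from the ephemeral model name and expands each yml assertion into one failing-rows query, joined with UNION ALL. A minimal sketch of the SQL it compiles to for the `test_github_actions__tf_workflows_simple` case defined earlier (names taken from that yml entry; the exact compiled text may differ slightly):

    -- Sketch of the compiled test for a single 'row_count >= 0' assertion.
    -- 'github_actions.tf_workflows' comes from stripping '__dbt__cte__' from
    -- the ephemeral model name and taking the text before the first '__'.
    WITH base_test_data AS
    (
        SELECT
            'github_actions.tf_workflows' AS test_name
            ,['FlipsideCrypto', 'admin-models'] as parameters
            ,COUNT(*) OVER () AS row_count
        FROM TABLE(github_actions.tf_workflows('FlipsideCrypto', 'admin-models')) t
        LIMIT 1
    )
    SELECT
        test_name,
        parameters,
        $$row_count >= 0$$ AS assertion,
        $$SELECT * FROM TABLE(github_actions.tf_workflows('FlipsideCrypto', 'admin-models'))$$ AS sql
    FROM base_test_data
    WHERE NOT (row_count >= 0)

The test passes when this query returns zero rows; any row returned carries the failing assertion plus a ready-to-run `sql` string for debugging.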