Mirror of https://github.com/FlipsideCrypto/livequery-models.git (synced 2026-02-06 10:56:46 +00:00)

Compare commits: v1.3.1-dev...main (71 commits)
Commits (SHA1):

f6e472d80c, 430bc25db1, bafc04a856, 8138951db2, a3b004d0cc, b3d6329d32, b8dc1b09a4, da46dc4a8e, 2651a45b7e, 9f98d2b9b2, 2c5d0cca67, 04bc1253f7, d01e8d47f8, 97a2811e8c, 90198898d0, bc7b78e592, 10414a122e, fc4a6752c1, 1eecc8f048, 2bdf24eaf3, 606ec83c25, c8dce1d376, 6a9f4c343a, 673a55c1b7, 5d6db27740, 318fd70418, d5f6103f4d, 01bd6dd51e, 29be791268, 476eee68ec, 5525d9f6c1, 1e8cf27f67, 4066365114, 76661504d2, 1a40d0705f, 4a5e83e913, 3594bdbda6, 1da9b610fb, 39c656ca77, 6d1407bdff, 1fc41519ef, 3d563d8710, 80590d8f9d, 0bc02b03a5, 14cdcb769f, 5e26c60d25, e72eec1eae, 213acecd40, a844411ef2, 99412f94c6, 7fe5cad7d7, 85749ebb60, a150937ac6, d323f7c757, c35e0fb03b, fc3071fd4a, 8364f5e052, 599a5572ea, 0e1602ed6f, 6efedaeafe, 7914a879ca, 314c086b2d, 9915e6980b, 5a27805d36, 0d763a15cb, 173b022b4c, a9112f2108, 2ae5fbfb7e, 6709d4655a, 554550f414, 89225abbcc
.github/workflows/dbt.yml (vendored, 2 changes)

@@ -51,7 +51,7 @@ jobs:
${{ inputs.command }}
- name: Store logs
if: always()
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: logs-${{ inputs.environment }}-${{ github.run_number }}-${{ github.run_attempt }}
path: |
.github/workflows/dbt_integration_test.yml (vendored, 22 changes)

@@ -3,14 +3,21 @@ run-name: ${{ github.event.inputs.branch }}

on:
workflow_dispatch:
inputs:
environment:
required: false
type: string
default: hosted
schedule:
# Runs “Daily at midnight GMT” (see https://crontab.guru)
# Runs "Daily at midnight GMT" (see https://crontab.guru)
- cron: '0 0 * * *'

concurrency: ${{ github.workflow }}

jobs:
test:
# For scheduled runs, run both environments
test-scheduled:
if: ${{ github.event_name == 'schedule' }}
name: ${{ matrix.environment }}
uses: ./.github/workflows/dbt.yml
secrets: inherit

@@ -26,3 +33,14 @@ jobs:
warehouse: ${{ matrix.warehouse }}
environment: ${{ matrix.environment }}
command: dbt test -s test___utils_udf_introspect

# For manual dispatch, run only the specified environment
test-dispatch:
if: ${{ github.event_name == 'workflow_dispatch' }}
name: ${{ github.event.inputs.environment }}
uses: ./.github/workflows/dbt.yml
secrets: inherit
with:
warehouse: ${{ (github.event.inputs.environment == 'workflow_prod' || github.event.inputs.environment == 'workflow_dev') && 'DBT_CLOUD' || 'XSMALL' }}
environment: ${{ github.event.inputs.environment == 'workflow_prod' && 'prod' || (github.event.inputs.environment == 'workflow_dev' && 'dev' || github.event.inputs.environment) }}
command: dbt test -s test___utils_udf_introspect
.github/workflows/dbt_udf_test.yml (vendored, 26 changes)

@@ -1,5 +1,5 @@
name: test udfs
run-name: ${{ github.event.inputs.branch }}
run-name: ${{ github.event_name == 'schedule' && 'prod' || github.event_name == 'workflow_dispatch' && inputs.environment || 'dev' }} - ${{ github.event.inputs.branch || github.event.pull_request.title || 'main' }}

on:
workflow_dispatch:

@@ -25,20 +25,15 @@ on:
schedule:
# Runs “Daily at midnight GMT” (see https://crontab.guru)
- cron: '0 0 * * *'
pull_request:
paths:
- 'models/deploy/marketplace/**'
- 'macros/marketplace/**'

concurrency:
group: ${{ github.workflow }}

jobs:
scheduled:
uses: ./.github/workflows/dbt.yml
if: github.event_name == 'schedule' || github.event_name == 'push'
secrets: inherit
with:
warehouse: ${{ vars.WAREHOUSE }}
environment: prod
command: dbt test --selector test_udfs --threads 24

dispatched:
uses: ./.github/workflows/dbt.yml
if: github.event_name == 'workflow_dispatch'

@@ -46,4 +41,13 @@ jobs:
with:
warehouse: ${{ inputs.warehouse }}
environment: ${{ inputs.environment }}
command: dbt test --selector test_udfs --threads 24
command: dbt test --selector test_udfs_without_context --threads 5

pull_request:
uses: ./.github/workflows/dbt.yml
if: github.event_name == 'pull_request'
secrets: inherit
with:
warehouse: ${{ vars.WAREHOUSE }}
environment: dev
command: dbt test --selector test_udfs_without_context --threads 5
.gitignore (vendored, 10 changes)

@@ -18,4 +18,12 @@ dbt-env/
.env
.*
# KEEP
!.github/

# Ignore Python bytecode files
*.pyc
__pycache__/

# Claude
.claude/
CLAUDE.md
Makefile (new file, 28 lines)

@@ -0,0 +1,28 @@
SHELL := /bin/bash

dbt-console:
	docker-compose run dbt_console

.PHONY: dbt-console

rm_logs:
	@if [ -d logs ]; then \
		rm -r logs 2>/dev/null || echo "Warning: Could not remove logs directory"; \
	else \
		echo "Logs directory does not exist"; \
	fi

deploy_core: rm_logs
	dbt run --select livequery_models.deploy.core.live \
	--vars '{UPDATE_UDFS_AND_SPS: true}' \
	--profiles-dir ~/.dbt \
	--profile livequery \
	--target dev

test_core: rm_logs
	dbt test --select live \
	--profiles-dir ~/.dbt \
	--profile livequery \
	--target dev
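A typical local loop built on these targets, assuming a `livequery` profile is configured in `~/.dbt/profiles.yml` as the recipes above expect:

```bash
# Deploy the core live UDFs to the dev target, then test them
make deploy_core
make test_core
```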
@@ -469,7 +469,7 @@ When False, none of the on-run-start macros are executed on model run
Default values are False

* Usage:
`dbt run --var '{"UPDATE_UDFS_AND_SPS":True}' -m ...`
`dbt run --vars '{"UPDATE_UDFS_AND_SPS":True}' -m ...`

Dropping and creating udfs can also be done without running a model:
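The command itself was cut off in this mirror. A plausible standalone invocation is sketched below; the macro name `create_udfs` is an assumption for illustration, not confirmed by this diff:

```bash
# Hypothetical: drop and recreate UDFs via a run-operation instead of a model run
dbt run-operation create_udfs --vars '{"UPDATE_UDFS_AND_SPS": True}' --target dev
```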
@@ -28,10 +28,6 @@ models:
livequery_models:
deploy:
+materialized: ephemeral
core:
+tags: core
secrets:
+enabled: '{{ true if env_var("ENABLE_SNOWFLAKE_SECRETS", "") else false }}'
evm:
+tags: evm
marketplace:

@@ -71,24 +67,28 @@ vars:
API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] }}'
EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] }}'
ROLES: '{{ var("config")[target.name]["ROLES"] }}'
MAX_BATCH_ROWS: '{{ var("config")[target.name]["MAX_BATCH_ROWS"] }}'

config:
# The keys correspond to dbt profiles and are case sensitive
dev:
API_INTEGRATION: AWS_LIVE_QUERY_STG
EXTERNAL_FUNCTION_URI: u5z0tu43sc.execute-api.us-east-1.amazonaws.com/stg/
API_INTEGRATION: AWS_LIVEQUERY_API_STG_V2
EXTERNAL_FUNCTION_URI: xi7ila2p66.execute-api.us-east-1.amazonaws.com/stg/
ROLES:
- INTERNAL_DEV
MAX_BATCH_ROWS: 10
prod:
API_INTEGRATION: AWS_LIVE_QUERY
EXTERNAL_FUNCTION_URI: bqco8lkjsb.execute-api.us-east-1.amazonaws.com/prod/
API_INTEGRATION: AWS_LIVEQUERY_API_PROD_V2
EXTERNAL_FUNCTION_URI: ae41qu1azg.execute-api.us-east-1.amazonaws.com/prod/
ROLES:
- VELOCITY_INTERNAL
- VELOCITY_ETHEREUM
- INTERNAL_DEV
- BI_ANALYTICS_READER
MAX_BATCH_ROWS: 10
hosted:
API_INTEGRATION: AWS_LIVEQUERY
EXTERNAL_FUNCTION_URI: dlcb3tpiz8.execute-api.us-east-1.amazonaws.com/hosted/
ROLES:
- DATA_READER
MAX_BATCH_ROWS: 10
macros/alerts/README.md (new file, 331 lines)

# GitHub Actions Slack Notifications

This directory contains a fast dbt macro system for sending intelligent Slack notifications from GitHub Actions workflows, with AI-powered failure analysis.

## Features

- **⚡ Fast Execution**: Pure SQL dbt macro (no Python overhead)
- **🤖 AI-Powered Analysis**: Automatic failure analysis using Cortex or Claude AI
- **💬 Rich Slack Messages**: Beautiful Block Kit formatted notifications with color-coded sidebars
- **🧵 Auto-Threading**: Detailed job logs posted as threaded replies
- **🎨 Custom Bot Appearance**: Custom names, emojis, and avatars
- **🔗 Dual Delivery Methods**: Support for both webhooks and bot tokens
- **📊 Comprehensive Details**: Job failures, logs, and actionable links

## Quick Setup

The `failed_gha_slack_alert` macro is ready to use immediately - no deployment required!

### Setup Options

#### Option 1: Bot Token Method (Recommended)

1. Create a Slack bot with `chat:write` permissions
2. Get the channel ID from Slack (e.g., `C1234567890` - not the channel name)
3. Store the bot token in the Livequery vault at `_FSC_SYS/SLACK/intelligence`
4. Add this step to your GitHub Actions workflow:

```yaml
- name: Notify Slack on Failure
  if: failure()
  run: |
    dbt run-operation failed_gha_slack_alert --vars '{
      "owner": "${{ github.repository_owner }}",
      "repo": "${{ github.event.repository.name }}",
      "run_id": "${{ github.run_id }}",
      "slack_channel": "C1234567890"
    }' --target dev
```

#### Option 2: Webhook Method (Simple Setup)

1. Create a Slack webhook URL in your workspace
2. Store the webhook URL in the Livequery vault at `_FSC_SYS/SLACK/alerts`
3. Add this step to your GitHub Actions workflow:

```yaml
- name: Notify Slack on Failure
  if: failure()
  run: |
    dbt run-operation failed_gha_slack_alert --vars '{
      "owner": "${{ github.repository_owner }}",
      "repo": "${{ github.event.repository.name }}",
      "run_id": "${{ github.run_id }}",
      "webhook_secret_name": "alerts"
    }' --target dev
```

## Configuration Options

### Core Parameters

| Parameter | Type | Required | Description |
|-----------|------|----------|-------------|
| `owner` | string | ✅ | GitHub repository owner |
| `repo` | string | ✅ | GitHub repository name |
| `run_id` | string | ✅ | GitHub Actions run ID |
| `slack_channel` | string | ✅* | Slack channel ID (e.g., 'C1234567890'); required for the bot token method |
| `webhook_secret_name` | string | ✅* | Webhook vault secret name; required for the webhook method |

### AI & Analysis

| Parameter | Type | Default | Description |
|-----------|------|---------|-------------|
| `enable_ai_analysis` | boolean | `true` | Enable AI failure analysis |
| `ai_provider` | string | `'cortex'` | AI provider: `'cortex'` (Snowflake built-in AI) |
| `model_name` | string | `'mistral-large'` | **Required for Cortex**: `'mistral-large'`, `'mistral-7b'`, `'llama2-70b-chat'`, `'mixtral-8x7b'` |
| `ai_prompt` | string | `''` | Custom AI analysis prompt (leave empty for the default) |

### Threading & Appearance

| Parameter | Type | Default | Description |
|-----------|------|---------|-------------|
| `enable_auto_threading` | boolean | `false` | Auto-post detailed job logs as thread replies |
| `username` | string | `'GitHub Actions Bot'` | Custom bot display name |
| `icon_emoji` | string | `':github:'` | Bot emoji (e.g., `:robot_face:`, `:stellar:`) |
| `icon_url` | string | `none` | Bot avatar URL (overrides icon_emoji) |
| `bot_secret_name` | string | `'intelligence'` | Name of the bot token secret in the vault |

## Usage Examples

### Basic Notification

```bash
dbt run-operation failed_gha_slack_alert --vars '{
  "owner": "FlipsideCrypto",
  "repo": "streamline-snowflake",
  "run_id": "16729602656",
  "slack_channel": "C087GJQ1ZHQ"
}' --target dev
```

### AI Analysis with Custom Bot

```bash
dbt run-operation failed_gha_slack_alert --vars '{
  "owner": "FlipsideCrypto",
  "repo": "streamline-snowflake",
  "run_id": "16729602656",
  "slack_channel": "C087GJQ1ZHQ",
  "enable_ai_analysis": true,
  "ai_provider": "cortex",
  "model_name": "mistral-7b",
  "username": "CI/CD Alert Bot",
  "icon_emoji": ":robot_face:"
}' --target dev
```

### Auto-Threading with Custom Prompt

```bash
dbt run-operation failed_gha_slack_alert --vars '{
  "owner": "FlipsideCrypto",
  "repo": "streamline-snowflake",
  "run_id": "16729602656",
  "slack_channel": "C087GJQ1ZHQ",
  "enable_ai_analysis": true,
  "ai_provider": "cortex",
  "model_name": "mixtral-8x7b",
  "ai_prompt": "Focus on dependency issues and provide quick fixes:",
  "enable_auto_threading": true,
  "username": "Pipeline Monitor",
  "icon_emoji": ":stellar:"
}' --target dev
```

### Webhook Method

```bash
dbt run-operation failed_gha_slack_alert --vars '{
  "owner": "FlipsideCrypto",
  "repo": "streamline-snowflake",
  "run_id": "16729602656",
  "webhook_secret_name": "prod-alerts",
  "enable_ai_analysis": true,
  "ai_provider": "cortex",
  "model_name": "mistral-large",
  "username": "Production Monitor",
  "icon_emoji": ":package:"
}' --target dev
```

### GitHub Actions Integration

```yaml
- name: Notify Slack on Failure
  if: failure()
  run: |
    dbt run-operation failed_gha_slack_alert --vars '{
      "owner": "${{ github.repository_owner }}",
      "repo": "${{ github.event.repository.name }}",
      "run_id": "${{ github.run_id }}",
      "slack_channel": "C087GJQ1ZHQ",
      "enable_ai_analysis": true,
      "ai_provider": "cortex",
      "model_name": "mistral-large",
      "enable_auto_threading": true,
      "username": "GitHub Actions",
      "icon_emoji": ":github:"
    }' --target dev
```

## Message Format

### Failure Messages Include

- **🔴 Red Sidebar**: Visual failure indicator
- **Header**: Repository name with failure indicator (❌)
- **Basic Info**: Run ID, failed job count, workflow name
- **🤖 AI Analysis**: Intelligent failure analysis with common patterns, root causes, and action items
- **🔗 Action Button**: Direct link to the workflow run
- **🧵 Threading** (if enabled): Individual job details and logs as thread replies

### Success Messages Include

- **🟢 Green Sidebar**: Visual success indicator
- **Header**: Repository name with success indicator (✅)
- **Basic Info**: Run ID, workflow name, success status
- **🔗 Action Button**: Direct link to the workflow run

## AI Analysis

The macro supports Snowflake's Cortex AI for intelligent failure analysis:

### Cortex (Default)

- Uses Snowflake's built-in Cortex AI
- **Requires the `model_name` parameter** to specify which model to use
- Available models: `'mistral-large'`, `'mistral-7b'`, `'llama2-70b-chat'`, `'mixtral-8x7b'`
- Automatically analyzes logs and provides insights
- Custom prompts supported via the `ai_prompt` parameter

Enable AI analysis with:

```yaml
"enable_ai_analysis": true,
"ai_provider": "cortex",
"model_name": "mistral-large", # Required!
"ai_prompt": "Focus on the most critical issues:" # Optional
```

## Environment Variables & Vault Setup

### Webhook Method

- `SLACK_WEBHOOK_URL`: Your Slack webhook URL (GitHub secret)

### Bot Token Method

- **No environment variables required!**
- Bot tokens are stored in the Livequery vault at `_FSC_SYS/SLACK/{bot_secret_name}`
- The channel ID is provided as a parameter in the macro call

### Vault Paths for Bot Tokens

Store your bot tokens in these vault locations:

- `prod/livequery/slack/intelligence` (default)
- `prod/livequery/alerts` (custom)
- `prod/livequery/<your bot's name>` (custom)

**Note:** The `_FSC_SYS/...` paths no longer work because we can no longer access Studio to store `CREDENTIALS`; the context plus `_FSC_SYS/...` lookup is deprecated and remains in the SQL code only for backward compatibility.

### How to Get Slack Channel IDs

1. **Right-click method**: Right-click the channel → Copy → Copy link (the ID is in the URL)
2. **API method**: Use the `conversations.list` endpoint, as sketched below
3. **App method**: Channel IDs appear in URLs like `/C1234567890/`
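A minimal sketch of the API method, assuming a bot token with the `channels:read` scope (the token value is a placeholder):

```bash
# List public channels with their IDs; paginate via response_metadata.next_cursor
curl -s -H "Authorization: Bearer xoxb-your-bot-token" \
  "https://slack.com/api/conversations.list?limit=200" \
  | jq '.channels[] | {id, name}'
```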
### Security Notes

- Never hardcode secrets in your workflow files
- Use GitHub's encrypted secrets for webhook URLs
- Bot tokens are automatically managed through the Livequery vault system
- Channel IDs are not sensitive and can be stored in code

## Troubleshooting

### Common Issues

1. **No notification sent**: Check the webhook URL or channel ID parameter
2. **Invalid channel ID**: Use the channel ID (C1234567890), not the name (#channel)
3. **AI analysis missing**: Ensure the GitHub Actions integration is properly set up
4. **Message formatting issues**: Verify the JSON syntax in the custom_message parameter
5. **Bot permissions**: Ensure the bot has the `chat:write` scope for the target channel
6. **Vault access**: Verify the bot token is stored at the correct vault path

### Debug Mode

Add this step before the notification to debug issues:

```yaml
- name: Debug Notification
  run: |
    echo "Owner: ${{ github.repository_owner }}"
    echo "Repo: ${{ github.event.repository.name }}"
    echo "Run ID: ${{ github.run_id }}"
    echo "Channel: C1234567890" # Your actual channel ID
```

### Channel ID Validation

Test whether your channel ID is valid:

```sql
SELECT slack_utils.validate_channel('C1234567890') as is_valid;
-- Should return true for valid channel IDs
```

## Integration with Livequery

This macro integrates with Livequery's marketplace UDFs:

- **`slack_utils.post_webhook()`**: For webhook-based notifications
- **`slack.post_message()`** & **`slack.post_reply()`**: For bot token messaging with threading
- **`github_actions.tf_failure_analysis_with_ai()`**: For AI-powered failure analysis

### UDF Function Signatures

```sql
-- Webhook (backward compatible)
slack_utils.post_webhook(webhook_secret_name, payload)

-- Bot messaging (new parameter-based)
slack.post_message(channel_id, payload, bot_secret_name)
slack.post_reply(channel_id, thread_ts, payload, bot_secret_name)

-- Or use the 2-parameter versions (uses the 'intelligence' bot token)
slack.post_message(channel_id, payload)
slack.post_reply(channel_id, thread_ts, payload)
```

Ensure these UDFs are deployed before using the notification macro.

## Performance & Benefits

### ⚡ **Lightning-Fast Execution**

- **Pure SQL**: No Python interpreter overhead
- **Direct UDF calls**: Leverages Livequery's optimized marketplace functions
- **Single transaction**: All operations in one dbt run-operation call
- **Instant feedback**: Real-time execution with immediate Slack delivery

### 🎯 **Production Ready**

- **Reliable**: Battle-tested with GitHub Actions workflows
- **Scalable**: Handles multiple failed jobs with threading
- **Secure**: Vault-based credential management
- **Flexible**: Supports both webhook and bot token methods

### 🤖 **Intelligent Analysis**

- **AI-Powered**: Cortex and Claude integration for failure analysis
- **Actionable Insights**: Common patterns, root causes, and prioritized action items
- **Context-Aware**: Includes job names, workflow details, and error logs
- **Formatted for Slack**: Optimized mrkdwn formatting for better readability

The `failed_gha_slack_alert` macro provides enterprise-grade Slack notifications with zero deployment overhead and lightning-fast performance.

## Examples Repository

See [our examples repository](https://github.com/FlipsideCrypto/livequery-examples) for complete workflow configurations and advanced usage patterns.
macros/alerts/slack_alert.sql (new file, 309 lines)

@@ -0,0 +1,309 @@
{% macro failed_gha_slack_alert() %}

{# Get parameters from vars #}
{%- set owner = var('owner') -%}
{%- set repo = var('repo') -%}
{%- set run_id = var('run_id') -%}
{%- set slack_channel = var('slack_channel', none) -%}
{%- set enable_ai_analysis = var('enable_ai_analysis', true) -%}
{%- set ai_provider = var('ai_provider', 'cortex') -%}
{%- set model_name = var('model_name', 'mistral-large') -%}
{%- set ai_prompt = var('ai_prompt', '') -%}
{%- set enable_auto_threading = var('enable_auto_threading', false) -%}
{%- set bot_secret_name = var('bot_secret_name', 'intelligence') -%}
{%- set webhook_secret_name = var('webhook_secret_name', none) -%}
{%- set username = var('username', 'GitHub Actions Bot') -%}
{%- set icon_emoji = var('icon_emoji', ':github:') -%}
{%- set icon_url = var('icon_url', none) -%}

{%- set webhook_url = env_var('SLACK_WEBHOOK_URL', '') -%}
{%- set use_webhook = webhook_url != '' and webhook_secret_name -%}

{# Check if we have a valid slack channel #}
{%- if slack_channel -%}
{{ log("Using bot token method with channel: " ~ slack_channel, true) }}
{%- set use_webhook = false -%}
{%- elif not use_webhook -%}
{{ log("Error: Either SLACK_WEBHOOK_URL with webhook_secret_name or slack_channel must be provided", true) }}
{{ return("") }}
{%- endif -%}

{%- if enable_ai_analysis -%}
{# Get failure data with AI analysis #}
{% set failure_query %}
SELECT
run_id,
ai_analysis,
total_failures,
failure_metadata
FROM TABLE(github_actions.tf_failure_analysis_with_ai('{{ owner }}', '{{ repo }}', '{{ run_id }}', '{{ ai_provider }}', '{{ model_name }}', '{{ ai_prompt }}'))
{% endset %}

{%- set failure_results = run_query(failure_query) -%}
{%- set failure_data = failure_results.rows[0] if failure_results.rows else [] -%}

{%- if failure_data -%}
{%- set total_failures = failure_data[2] -%}
{%- set ai_analysis = failure_data[1] -%}
{%- set failure_metadata = fromjson(failure_data[3]) if failure_data[3] else [] -%}
{%- else -%}
{%- set total_failures = 0 -%}
{%- set ai_analysis = none -%}
{%- set failure_metadata = [] -%}
{%- endif -%}
{%- else -%}
{# Get basic failure data without AI #}
{% set basic_query %}
SELECT
COUNT(*) as total_failures,
MAX(workflow_name) as workflow_name,
ARRAY_AGG(OBJECT_CONSTRUCT(
'workflow_name', workflow_name,
'job_name', job_name,
'job_id', job_id,
'job_url', job_url,
'logs_preview', ARRAY_TO_STRING(failed_step_logs, '\n')
)) as failure_metadata
FROM TABLE(github_actions.tf_failed_jobs_with_logs('{{ owner }}', '{{ repo }}', '{{ run_id }}'))
{% endset %}

{%- set basic_results = run_query(basic_query) -%}
{%- set basic_data = basic_results.rows[0] if basic_results.rows else [] -%}

{%- if basic_data -%}
{%- set total_failures = basic_data[0] -%}
{%- set ai_analysis = none -%}
{%- set failure_metadata = fromjson(basic_data[2]) if basic_data[2] else [] -%}
{%- else -%}
{%- set total_failures = 0 -%}
{%- set ai_analysis = none -%}
{%- set failure_metadata = [] -%}
{%- endif -%}
{%- endif -%}

{# Extract workflow name #}
{%- set workflow_name = failure_metadata[0].workflow_name if failure_metadata else repo -%}

{# Build Slack message #}
{%- if total_failures == 0 -%}
{# Success message #}
{%- set message_blocks = [
{
'type': 'header',
'text': {'type': 'plain_text', 'text': '✅ ' ~ workflow_name ~ ' - Success'}
},
{
'type': 'section',
'fields': [
{'type': 'mrkdwn', 'text': '*Run ID:* ' ~ run_id},
{'type': 'mrkdwn', 'text': '*Workflow:* ' ~ workflow_name},
{'type': 'mrkdwn', 'text': '*Status:* Success'}
]
},
{
'type': 'actions',
'elements': [{
'type': 'button',
'text': {'type': 'plain_text', 'text': 'View Workflow'},
'url': 'https://github.com/' ~ owner ~ '/' ~ repo ~ '/actions/runs/' ~ run_id,
'style': 'primary'
}]
}
] -%}

{%- set message_payload = {
'text': '✅ GitHub Actions Success: ' ~ repo,
'attachments': [{
'color': '#36a64f',
'blocks': message_blocks
}]
} -%}

{# Add customization for success messages at root level #}
{%- if username and username != 'none' -%}
{%- do message_payload.update({'username': username}) -%}
{%- endif -%}
{%- if icon_url and icon_url != 'none' and icon_url != '' -%}
{%- do message_payload.update({'icon_url': icon_url}) -%}
{%- elif icon_emoji and icon_emoji != 'none' -%}
{%- do message_payload.update({'icon_emoji': icon_emoji}) -%}
{%- endif -%}
{%- else -%}
{# Failure message #}
{%- set message_blocks = [
{
'type': 'header',
'text': {'type': 'plain_text', 'text': ':red_circle: ' ~ workflow_name ~ ' - Failed'}
},
{
'type': 'section',
'fields': [
{'type': 'mrkdwn', 'text': '*Run ID:* ' ~ run_id},
{'type': 'mrkdwn', 'text': '*Workflow:* ' ~ workflow_name},
{'type': 'mrkdwn', 'text': '*Failed Jobs:* ' ~ total_failures}
]
}
] -%}

{# Add AI analysis if available #}
{%- if enable_ai_analysis and ai_analysis -%}
{%- do message_blocks.append({
'type': 'section',
'text': {
'type': 'mrkdwn',
'text': '*🤖 AI Analysis:*\n' ~ ai_analysis[:2900]
}
}) -%}
{%- endif -%}

{# Add action button #}
{%- do message_blocks.append({
'type': 'actions',
'elements': [{
'type': 'button',
'text': {'type': 'plain_text', 'text': 'View Workflow'},
'url': 'https://github.com/' ~ owner ~ '/' ~ repo ~ '/actions/runs/' ~ run_id,
'style': 'danger'
}]
}) -%}

{%- set message_payload = {
'text': '❌ GitHub Actions Failed: ' ~ repo,
'attachments': [{
'color': '#d63638',
'blocks': message_blocks
}]
} -%}

{# Add customization for failure messages at root level #}
{%- if username and username != 'none' -%}
{%- do message_payload.update({'username': username}) -%}
{%- endif -%}
{%- if icon_url and icon_url != 'none' and icon_url != '' -%}
{%- do message_payload.update({'icon_url': icon_url}) -%}
{%- elif icon_emoji and icon_emoji != 'none' -%}
{%- do message_payload.update({'icon_emoji': icon_emoji}) -%}
{%- endif -%}
{%- endif -%}

{# Send message #}
{%- if use_webhook -%}
{% set send_query %}
SELECT slack_utils.post_webhook('{{ webhook_secret_name }}', PARSE_JSON($${{ message_payload | tojson }}$$)) as result
{% endset %}
{%- else -%}
{% set send_query %}
SELECT slack.post_message('{{ slack_channel }}', PARSE_JSON($${{ message_payload | tojson }}$$), '{{ bot_secret_name }}') as result
{% endset %}
{%- endif -%}

{%- set result = run_query(send_query) -%}
{{ log("Main message sent successfully", true) }}

{# Handle threading for failures #}
{%- if enable_auto_threading and total_failures > 0 and not use_webhook and slack_channel -%}
{%- set main_response = fromjson(result.rows[0][0]) -%}
{%- set main_thread_ts = main_response.ts or (main_response.data and main_response.data.ts) -%}

{{ log("Starting threading with " ~ failure_metadata|length ~ " jobs", true) }}

{%- for job_meta in failure_metadata -%}
{%- set job_name = job_meta.job_name -%}
{%- set job_url = job_meta.job_url -%}
{%- set logs_preview = job_meta.logs_preview -%}

{# Post job summary in thread #}
{%- set job_summary = {
'text': 'Job Details: ' ~ job_name,
'attachments': [{
'color': '#d63638',
'blocks': [
{
'type': 'section',
'fields': [
{'type': 'mrkdwn', 'text': '*Job:* ' ~ job_name},
{'type': 'mrkdwn', 'text': '*Status:* failure'}
]
},
{
'type': 'actions',
'elements': [{
'type': 'button',
'text': {'type': 'plain_text', 'text': 'View Job'},
'url': job_url,
'style': 'danger'
}]
}
]
}]
} -%}

{# Add customization to thread messages #}
{%- if username and username != 'none' -%}
{%- do job_summary.update({'username': username}) -%}
{%- endif -%}
{%- if icon_url and icon_url != 'none' and icon_url != '' -%}
{%- do job_summary.update({'icon_url': icon_url}) -%}
{%- elif icon_emoji and icon_emoji != 'none' -%}
{%- do job_summary.update({'icon_emoji': icon_emoji}) -%}
{%- endif -%}

{% set job_thread_query %}
SELECT slack.post_reply('{{ slack_channel }}', '{{ main_thread_ts }}', PARSE_JSON($${{ job_summary | tojson }}$$), '{{ bot_secret_name }}') as result
{% endset %}

{%- set job_result = run_query(job_thread_query) -%}

{# Post logs as additional thread replies if available - split long logs #}
{%- if logs_preview and logs_preview != '' -%}
{%- set max_chunk_size = 2900 -%}
{%- set log_chunks = [] -%}

{# Split logs into chunks #}
{%- for i in range(0, logs_preview|length, max_chunk_size) -%}
{%- set chunk = logs_preview[i:i+max_chunk_size] -%}
{%- do log_chunks.append(chunk) -%}
{%- endfor -%}

{# Send each chunk as a separate thread message #}
{%- for chunk_idx in range(log_chunks|length) -%}
{%- set chunk = log_chunks[chunk_idx] -%}
{%- set chunk_header = '' -%}

{# Add chunk header if multiple chunks #}
{%- if log_chunks|length > 1 -%}
{%- set chunk_header = '📋 Logs (' ~ (chunk_idx + 1) ~ '/' ~ log_chunks|length ~ '):\n' -%}
{%- else -%}
{%- set chunk_header = '📋 Logs:\n' -%}
{%- endif -%}

{%- set log_message = {'text': chunk_header ~ '```\n' ~ chunk ~ '\n```'} -%}

{# Add customization to log thread messages #}
{%- if username and username != 'none' -%}
{%- do log_message.update({'username': username}) -%}
{%- endif -%}
{%- if icon_url and icon_url != 'none' and icon_url != '' -%}
{%- do log_message.update({'icon_url': icon_url}) -%}
{%- elif icon_emoji and icon_emoji != 'none' -%}
{%- do log_message.update({'icon_emoji': icon_emoji}) -%}
{%- endif -%}

{% set log_thread_query %}
SELECT slack.post_reply('{{ slack_channel }}', '{{ main_thread_ts }}', PARSE_JSON($${{ log_message | tojson }}$$), '{{ bot_secret_name }}') as result
{% endset %}

{%- set log_result = run_query(log_thread_query) -%}
{%- endfor -%}
{%- endif -%}

{{ log("Posted thread for job: " ~ job_name, true) }}
{%- endfor -%}

{{ log("Threading completed for " ~ failure_metadata|length ~ " jobs", true) }}
{%- else -%}
{{ log("Message sent: " ~ result.rows[0][0] if result.rows else "No response", true) }}
{%- endif -%}

{% endmacro %}
@@ -1,5 +1,21 @@
{% macro config_core__live(schema="_live") %}

- name: {{ schema }}.udf_api_batched
signature:
- [method, STRING]
- [url, STRING]
- [headers, OBJECT]
- [DATA, VARIANT]
- [user_id, STRING]
- [SECRET, STRING]
return_type: VARIANT
func_type: EXTERNAL
api_integration: '{{ var("API_INTEGRATION") }}'
max_batch_rows: '{{ var("MAX_BATCH_ROWS") }}'
options: |
NOT NULL
sql: udf_api

- name: {{ schema }}.udf_api
signature:
- [method, STRING]

@@ -13,6 +29,44 @@
api_integration: '{{ var("API_INTEGRATION") }}'
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
sql: udf_api
{% endmacro %}

{% if is_udf_api_v2_compatible() %}
- name: {{ schema }}.udf_api_sync
signature:
- [method, STRING]
- [url, STRING]
- [headers, OBJECT]
- [DATA, VARIANT]
- [user_id, STRING]
- [SECRET, STRING]
return_type: VARIANT
func_type: EXTERNAL
api_integration: '{{ var("API_INTEGRATION") }}'
max_batch_rows: '1'
headers:
- 'fsc-quantum-execution-mode': 'sync'
options: |
NOT NULL
sql: 'v2/udf_api'

- name: {{ schema }}.udf_api_async
signature:
- [method, STRING]
- [url, STRING]
- [headers, OBJECT]
- [DATA, VARIANT]
- [user_id, STRING]
- [SECRET, STRING]
return_type: VARIANT
func_type: EXTERNAL
api_integration: '{{ var("API_INTEGRATION") }}'
max_batch_rows: '1'
headers:
- 'fsc-quantum-execution-mode': 'async'
options: |
NOT NULL
sql: 'v2/udf_api'
{% endif %}

{% endmacro %}
@@ -132,6 +132,14 @@ def transform_base58_to_hex(base58):
base_count = len(ALPHABET)

num = 0
leading_zeros = 0

for char in base58:
if char == '1':
leading_zeros += 1
else:
break

for char in base58:
num *= base_count
if char in ALPHABET:

@@ -144,7 +152,9 @@ def transform_base58_to_hex(base58):
if len(hex_string) % 2 != 0:
hex_string = '0' + hex_string

return '0x' + hex_string
hex_leading_zeros = '00' * leading_zeros

return '0x' + hex_leading_zeros + hex_string

{% endmacro %}
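The change above preserves leading zero bytes: in Base58, each leading '1' encodes a 0x00 byte, which a pure integer conversion silently drops. A condensed Python sketch of the fixed behavior (an illustration, not the deployed UDF):

```python
ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def base58_to_hex(base58: str) -> str:
    # Count leading '1' characters: each one is a literal 0x00 byte.
    leading_zeros = len(base58) - len(base58.lstrip('1'))
    num = 0
    for char in base58:
        num = num * len(ALPHABET) + ALPHABET.index(char)
    hex_string = format(num, 'x')
    if len(hex_string) % 2 != 0:
        hex_string = '0' + hex_string
    # Re-attach the zero bytes the integer math discarded.
    return '0x' + '00' * leading_zeros + hex_string

# The four leading '1's survive as four 00 bytes instead of vanishing.
print(base58_to_hex('1111BvzqX'))
```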
@@ -226,7 +236,7 @@ def transform_hex_to_bech32(hex, hrp=''):
return 'Data conversion failed'

checksum = bech32_create_checksum(hrp, data5bit)

return hrp + '1' + ''.join([CHARSET[d] for d in data5bit + checksum])

{% endmacro %}

@@ -250,14 +260,14 @@ def int_to_binary(num):
if inverted_string[i] == "1" and carry == 1:
result = "0" + result
elif inverted_string[i] == "0" and carry == 1:
result = "1" + result
result = "1" + result
carry = 0
else:
result = inverted_string[i] + result

binary_string = result
binary_string = result

return binary_string
return binary_string

{% endmacro %}

@@ -268,7 +278,7 @@ def binary_to_int(binary):
for char in binary:
if char not in "01":
raise ValueError("Input string must be a valid binary string.")

integer = 0

for i, digit in enumerate(binary[::-1]):

@@ -277,5 +287,39 @@
integer += digit_int * 2**i

return str(integer)

{% endmacro %}

{% endmacro %}
{% macro create_udf_redirect_s3_presigned_url() %}
import requests
import json
import gzip
import io

def process_request(url):
    resp = requests.get(url)
    content = resp.content

    # Decompress if URL contains .json.gz
    if '.json.gz' in url:
        try:
            # Decompress the gzipped content
            with gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb') as f:
                content = f.read()
        except Exception as e:
            return {"error": "Failed to decompress gzip data", "message": str(e)}

    # Try to parse as JSON
    try:
        text_content = content.decode('utf-8')
        return json.loads(text_content)
    except (json.JSONDecodeError, UnicodeDecodeError):
        # If not JSON or not valid UTF-8, return as string or base64
        try:
            # Try to return as a string if it's valid text
            return content.decode('utf-8')
        except UnicodeDecodeError:
            # For binary data, return base64
            import base64
            return base64.b64encode(content).decode('ascii')
{% endmacro %}
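An illustrative call, mirroring how the async `udf_api_v2` overloads below consume this UDF (the endpoint URL is a placeholder; the response shape is assumed from that usage):

```sql
-- Unwrap an async response: fetch the presigned URL, then pull the payload out
SELECT utils.udf_redirect_s3_presigned_url(
    _live.udf_api_async(
        'GET', 'https://api.example.com/export', {}, NULL, _utils.UDF_WHOAMI(), ''
    ):s3_presigned_url::STRING
):data[0][1];
```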
@@ -1,5 +1,26 @@
{% macro config_core_live(schema="live") %}

- name: {{ schema }}.udf_api_batched
signature:
- [method, STRING]
- [url, STRING]
- [headers, OBJECT]
- [data, VARIANT]
- [secret_name, STRING]
return_type: VARIANT
options: |
VOLATILE
sql: |
SELECT
_live.UDF_API(
method,
url,
headers,
data,
_utils.UDF_WHOAMI(),
secret_name
)

- name: {{ schema }}.udf_api
signature:
- [method, STRING]

@@ -9,8 +30,6 @@
- [secret_name, STRING]
return_type: VARIANT
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
sql: |
SELECT

@@ -22,6 +41,7 @@
_utils.UDF_WHOAMI(),
secret_name
)

- name: {{ schema }}.udf_api
signature:
- [method, STRING]

@@ -30,8 +50,6 @@
- [data, VARIANT]
return_type: VARIANT
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
sql: |
SELECT

@@ -49,8 +67,6 @@
- [data, VARIANT]
return_type: VARIANT
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
sql: |
SELECT

@@ -69,8 +85,6 @@
- [secret_name, STRING]
return_type: VARIANT
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
sql: |
SELECT

@@ -87,8 +101,6 @@
- [url, STRING]
return_type: VARIANT
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
sql: |
SELECT

@@ -96,7 +108,7 @@
'GET',
url,
{},
{},
NULL,
_utils.UDF_WHOAMI(),
''
)

@@ -106,8 +118,6 @@
- [secret_name, STRING]
return_type: VARIANT
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
sql: |
SELECT

@@ -128,8 +138,6 @@
- [parameters, VARIANT]
return_type: VARIANT
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Executes a JSON RPC call on a blockchain.$$
sql: |

@@ -141,9 +149,267 @@
func_type: EXTERNAL
api_integration: '{{ var("API_INTEGRATION") }}'
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns a list of allowed domains.$$
sql: allowed
{% endmacro %}

{% if is_udf_api_v2_compatible() %}
- name: {{ schema }}.udf_api_v2
signature:
- [url, STRING]
- [headers, OBJECT]
- [secret_name, STRING]
- [is_async, BOOLEAN]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a LiveQuery Sync or Async External Function.$$
sql: |
SELECT
CASE is_async
WHEN TRUE
THEN
utils.udf_redirect_s3_presigned_url(
_live.udf_api_async(
'GET', URL, HEADERS, {}, _utils.UDF_WHOAMI(), SECRET_NAME
):s3_presigned_url :: STRING
):data[0][1]
ELSE
_live.udf_api_sync(
'GET', URL, HEADERS, {}, _utils.UDF_WHOAMI(), SECRET_NAME
)
END

- name: {{ schema }}.udf_api_v2
signature:
- [method, STRING]
- [url, STRING]
- [headers, OBJECT]
- [data, VARIANT]
- [secret_name, STRING]
- [is_async, BOOLEAN]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a LiveQuery Sync or Async External Function.$$
sql: |
SELECT
CASE is_async
WHEN TRUE
THEN
utils.udf_redirect_s3_presigned_url(
_live.udf_api_async(
METHOD, URL, HEADERS, DATA, _utils.UDF_WHOAMI(), SECRET_NAME
):s3_presigned_url :: STRING
):data[0][1]
ELSE
_live.udf_api_sync(
METHOD, URL, HEADERS, DATA, _utils.UDF_WHOAMI(), SECRET_NAME
)
END

- name: {{ schema }}.udf_api_v2
signature:
- [method, STRING]
- [url, STRING]
- [headers, OBJECT]
- [data, VARIANT]
- [is_async, BOOLEAN]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a LiveQuery Sync or Async External Function.$$
sql: |
SELECT
CASE is_async
WHEN TRUE
THEN
utils.udf_redirect_s3_presigned_url(
_live.udf_api_async(
METHOD, URL, HEADERS, DATA, _utils.UDF_WHOAMI(), ''
):s3_presigned_url :: STRING
):data[0][1]
ELSE
_live.udf_api_sync(
METHOD, URL, HEADERS, DATA, _utils.UDF_WHOAMI(), ''
)
END

- name: {{ schema }}.udf_api_v2
signature:
- [url, STRING]
- [data, VARIANT]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a Quick Post LiveQuery Sync External Function.$$
sql: |
SELECT
_live.udf_api_sync(
'POST',
url,
{'Content-Type': 'application/json'},
data,
_utils.UDF_WHOAMI(),
''
)

- name: {{ schema }}.udf_api_v2
signature:
- [url, STRING]
- [data, VARIANT]
- [is_async, BOOLEAN]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a LiveQuery Sync or Async External Function.$$
sql: |
SELECT
CASE is_async
WHEN TRUE
THEN
utils.udf_redirect_s3_presigned_url(
_live.udf_api_async(
'GET', URL, {'Content-Type': 'application/json'}, data, _utils.UDF_WHOAMI(), ''
):s3_presigned_url :: STRING
):data[0][1]
ELSE
_live.udf_api_sync(
'GET', URL, {'Content-Type': 'application/json'}, data, _utils.UDF_WHOAMI(), ''
)
END

- name: {{ schema }}.udf_api_v2
signature:
- [url, STRING]
- [data, VARIANT]
- [secret_name, STRING]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a Quick Post LiveQuery Sync External Function.$$
sql: |
SELECT
_live.udf_api_sync(
'POST',
url,
{'Content-Type': 'application/json'},
data,
_utils.UDF_WHOAMI(),
secret_name
)

- name: {{ schema }}.udf_api_v2
signature:
- [url, STRING]
- [data, VARIANT]
- [secret_name, STRING]
- [is_async, BOOLEAN]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a LiveQuery Sync or Async External Function.$$
sql: |
SELECT
CASE is_async
WHEN TRUE
THEN
utils.udf_redirect_s3_presigned_url(
_live.udf_api_async(
'GET', URL, {'Content-Type': 'application/json'}, data, _utils.UDF_WHOAMI(), secret_name
):s3_presigned_url :: STRING
):data[0][1]
ELSE
_live.udf_api_sync(
'GET', URL, {'Content-Type': 'application/json'}, data, _utils.UDF_WHOAMI(), secret_name
)
END

- name: {{ schema }}.udf_api_v2
signature:
- [url, STRING]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a Quick GET LiveQuery Sync External Function.$$
sql: |
SELECT
_live.udf_api_sync(
'GET',
url,
{},
NULL,
_utils.UDF_WHOAMI(),
''
)

- name: {{ schema }}.udf_api_v2
signature:
- [url, STRING]
- [is_async, BOOLEAN]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a LiveQuery Sync or Async External Function.$$
sql: |
SELECT
CASE is_async
WHEN TRUE
THEN
utils.udf_redirect_s3_presigned_url(
_live.udf_api_async(
'GET', URL, {'Content-Type': 'application/json'}, {}, _utils.UDF_WHOAMI(), ''
):s3_presigned_url :: STRING
):data[0][1]
ELSE
_live.udf_api_sync(
'GET', URL, {'Content-Type': 'application/json'}, {}, _utils.UDF_WHOAMI(), ''
)
END

- name: {{ schema }}.udf_api_v2
signature:
- [url, STRING]
- [secret_name, STRING]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a Quick GET LiveQuery Sync External Function.$$
sql: |
SELECT
_live.udf_api_sync(
'GET',
url,
{},
{},
_utils.UDF_WHOAMI(),
secret_name
)

- name: {{ schema }}.udf_api_v2
signature:
- [url, STRING]
- [secret_name, STRING]
- [is_async, BOOLEAN]
return_type: VARIANT
options: |
VOLATILE
COMMENT = $$Executes a LiveQuery Sync or Async External Function.$$
sql: |
SELECT
CASE is_async
WHEN TRUE
THEN
utils.udf_redirect_s3_presigned_url(
_live.udf_api_async(
'GET', URL, {'Content-Type': 'application/json'}, {}, _utils.UDF_WHOAMI(), secret_name
):s3_presigned_url :: STRING
):data[0][1]
ELSE
_live.udf_api_sync(
'GET', URL, {'Content-Type': 'application/json'}, {}, _utils.UDF_WHOAMI(), secret_name
)
END
{% endif %}
{% endmacro %}
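A few illustrative calls against the overloads above, assuming the default `live` schema (URLs are placeholders; the routing behavior is read off the definitions, not verified against a deployment):

```sql
-- Quick sync GET (single-argument overload)
SELECT live.udf_api_v2('https://api.example.com/health');

-- Async GET: routed through _live.udf_api_async and unwrapped via the presigned URL
SELECT live.udf_api_v2('https://api.example.com/big-export', TRUE);

-- Quick sync POST with a JSON body (url, data overload)
SELECT live.udf_api_v2('https://api.example.com/ingest', {'rows': [1, 2, 3]});
```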
@@ -24,7 +24,7 @@
LANGUAGE PYTHON
RETURNS NULL ON NULL INPUT
IMMUTABLE
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
HANDLER = 'hex_to_int'
sql: |
{{ python_hex_to_int() | indent(4) }}

@@ -38,7 +38,7 @@
LANGUAGE PYTHON
RETURNS NULL ON NULL INPUT
IMMUTABLE
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
HANDLER = 'hex_to_int'
sql: |
{{ python_udf_hex_to_int_with_encoding() | indent(4) }}

@@ -48,7 +48,7 @@
return_type: TEXT
options: |
LANGUAGE PYTHON
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
HANDLER = 'get_simplified_signature'
sql: |
{{ create_udf_evm_text_signature() | indent(4) }}

@@ -58,7 +58,7 @@
return_type: TEXT
options: |
LANGUAGE PYTHON
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
PACKAGES = ('pycryptodome==3.15.0')
HANDLER = 'udf_encode'
sql: |

@@ -150,7 +150,7 @@
LANGUAGE PYTHON
RETURNS NULL ON NULL INPUT
IMMUTABLE
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
COMMENT=$$Python (function)[https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode] to convert an object to a URL query string.$$
HANDLER = 'object_to_url_query_string'
sql: |

@@ -165,7 +165,7 @@
LANGUAGE PYTHON
RETURNS NULL ON NULL INPUT
IMMUTABLE
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
COMMENT=$$Python (function)[https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode] to convert an array to a URL query string.$$
HANDLER = 'object_to_url_query_string'
sql: |

@@ -210,7 +210,7 @@
NULL
LANGUAGE PYTHON
IMMUTABLE
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
HANDLER = 'transform'
sql: |
{{ python_udf_evm_transform_log() | indent(4) }}

@@ -238,13 +238,25 @@
RETURNS NULL ON NULL INPUT
sql: evm/decode/log

- name: {{ schema }}.udf_evm_decode_trace
signature:
- [abi, OBJECT]
- [data, OBJECT]
return_type: ARRAY
func_type: EXTERNAL
api_integration: '{{ var("API_INTEGRATION") }}'
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
sql: evm/decode/trace

- name: {{ schema }}.udf_base58_to_hex
signature:
- [base58, STRING]
return_type: TEXT
options: |
LANGUAGE PYTHON
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
HANDLER = 'transform_base58_to_hex'
sql: |
{{ create_udf_base58_to_hex() | indent(4) }}

@@ -255,7 +267,7 @@
return_type: TEXT
options: |
LANGUAGE PYTHON
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
HANDLER = 'transform_hex_to_base58'
sql: |
{{ create_udf_hex_to_base58() | indent(4) }}

@@ -267,7 +279,7 @@
return_type: TEXT
options: |
LANGUAGE PYTHON
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
HANDLER = 'transform_hex_to_bech32'
sql: |
{{ create_udf_hex_to_bech32() | indent(4) }}

@@ -278,7 +290,7 @@
return_type: TEXT
options: |
LANGUAGE PYTHON
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
HANDLER = 'int_to_binary'
sql: |
{{ create_udf_int_to_binary() | indent(4) }}

@@ -289,9 +301,22 @@
return_type: TEXT
options: |
LANGUAGE PYTHON
RUNTIME_VERSION = '3.8'
RUNTIME_VERSION = '3.10'
HANDLER = 'binary_to_int'
sql: |
{{ create_udf_binary_to_int() | indent(4) }}

{% endmacro %}
- name: {{ schema }}.udf_redirect_s3_presigned_url
signature:
- [url, STRING]
return_type: VARIANT
options: |
LANGUAGE PYTHON
RUNTIME_VERSION = '3.10'
HANDLER = 'process_request'
EXTERNAL_ACCESS_INTEGRATIONS = (S3_EXPRESS_EXTERNAL_ACCESS_INTEGRATION)
PACKAGES = ('requests')
sql: |
{{ create_udf_redirect_s3_presigned_url() | indent(4) }}

{% endmacro %}
@ -24,7 +24,7 @@
|
||||
COMMENT = $$Returns the native asset balance at the latest block for a given address.$$
|
||||
sql: |
|
||||
{{ evm_latest_native_balance_string(schema, blockchain, network) | indent(4) -}}
|
||||
|
||||
|
||||
- name: {{ schema -}}.tf_latest_native_balance
|
||||
signature:
|
||||
- [wallets, ARRAY, An array of addresses string to get the balance of at the latest block]
|
||||
@ -41,7 +41,7 @@
|
||||
- name: {{ schema -}}.tf_latest_token_balance
|
||||
signature:
|
||||
- [wallet, STRING, The address to get the balance of at the latest block]
|
||||
- [token, STRING, The address of the token to get the balance of]
|
||||
- [token, STRING, The address of the token to get the balance of]
|
||||
return_type:
|
||||
- "TABLE(status STRING, blockchain STRING, network STRING, wallet_address STRING, token_address STRING, symbol STRING, raw_balance STRING, balance FLOAT)"
|
||||
options: |
|
||||
@ -55,7 +55,7 @@
|
||||
- name: {{ schema -}}.tf_latest_token_balance
|
||||
signature:
|
||||
- [wallet, STRING, The address to get the balance of at the latest block]
|
||||
- [tokens, ARRAY, An array of address strings of the tokens to get the balance of]
|
||||
- [tokens, ARRAY, An array of address strings of the tokens to get the balance of]
|
||||
return_type:
|
||||
- "TABLE(status STRING, blockchain STRING, network STRING, wallet_address STRING, token_address STRING, symbol STRING, raw_balance STRING, balance FLOAT)"
|
||||
options: |
|
||||
@ -83,7 +83,7 @@
|
||||
- name: {{ schema -}}.tf_latest_token_balance
|
||||
signature:
|
||||
- [wallets, ARRAY, An array of addresses string to get the balance of at the latest block]
|
||||
- [tokens, ARRAY, An array of address strings of the tokens to get the balance of]
|
||||
- [tokens, ARRAY, An array of address strings of the tokens to get the balance of]
|
||||
return_type:
|
||||
- "TABLE(status STRING, blockchain STRING, network STRING, wallet_address STRING, token_address STRING, symbol STRING, raw_balance STRING, balance FLOAT)"
|
||||
options: |
|
||||
@ -281,7 +281,7 @@
|
||||
COMMENT = $$Returns the latest events emitted by a contract within the last `lookback` blocks. *Please note there are RPC limitations on this method.*$$
|
||||
sql: |
|
||||
{{ evm_latest_contract_events_si(schema, blockchain, network) | indent(4) -}}
|
||||
|
||||
|
||||
- name: {{ schema -}}.tf_latest_contract_events
|
||||
signature:
|
||||
- [addresses, ARRAY, The addresses of the contracts to get the events of]
|
||||
@ -362,8 +362,168 @@
|
||||
COMMENT = $$Returns the latest decoded events emitted by multiple contracts within the last `lookback` blocks. Submit missing ABIs [here](https://science.flipsidecrypto.xyz/abi-requestor/). *Please note there are RPC limitations on this method.* $$
|
||||
sql: |
|
||||
{{ evm_latest_contract_events_decoded_ai(schema, blockchain, network) | indent(4) -}}
|
||||
|
||||
- name: {{ schema -}}.tf_fact_blocks
|
||||
signature:
|
||||
- [block_height, INTEGER, The start block height to get the blocks from]
|
||||
- [to_latest, BOOLEAN, Whether to continue fetching blocks until the latest block or not]
|
||||
return_type:
|
||||
- "TABLE(block_number INTEGER, block_timestamp TIMESTAMP_NTZ, network STRING, blockchain STRING, tx_count INTEGER, difficulty INTEGER, total_difficulty INTEGER, extra_data STRING, gas_limit INTEGER, gas_used INTEGER, hash STRING, parent_hash STRING, miner STRING, nonce INTEGER, receipts_root STRING, sha3_uncles STRING, size INTEGER, uncle_blocks VARIANT, block_header_json OBJECT, excess_blob_gas INTEGER, blob_gas_used INTEGER, fact_blocks_id STRING, inserted_timestamp TIMESTAMP_NTZ, modified_timestamp TIMESTAMP_NTZ, withdrawals VARIANT, withdrawals_root STRING)"
|
||||
options: |
|
||||
NOT NULL
|
||||
RETURNS NULL ON NULL INPUT
|
||||
VOLATILE
|
||||
COMMENT = $$Returns the block data for a given block height. If to_latest is true, it will continue fetching blocks until the latest block. Otherwise, it will fetch blocks until the block height is reached.$$
|
||||
sql: |
|
||||
{{ evm_live_view_fact_blocks(schema, blockchain, network) | indent(4) -}}
|
||||
|
||||
- name: {{ schema -}}.tf_fact_event_logs
signature:
- [block_height, INTEGER, The start block height to get the logs from]
- [to_latest, BOOLEAN, Whether to continue fetching logs until the latest block or not]
return_type:
- "TABLE(block_number INTEGER, block_timestamp TIMESTAMP_NTZ, tx_hash STRING, origin_function_signature STRING, origin_from_address STRING, origin_to_address STRING, event_index INTEGER, contract_address STRING, topics VARIANT, data STRING, event_removed BOOLEAN, tx_status STRING, _log_id STRING, fact_event_logs_id STRING, inserted_timestamp TIMESTAMP_NTZ, modified_timestamp TIMESTAMP_NTZ)"
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the logs for a given block height. If to_latest is true, it will continue fetching logs until the latest block. Otherwise, it will fetch logs until the block height is reached.$$
sql: |
{{ evm_live_view_fact_event_logs(schema, blockchain, network) | indent(4) -}}

- name: {{ schema -}}.tf_fact_decoded_event_logs
signature:
- [block_height, INTEGER, The start block height to get the logs from]
- [to_latest, BOOLEAN, Whether to continue fetching logs until the latest block or not]
return_type:
- "TABLE(block_number INTEGER, block_timestamp TIMESTAMP_NTZ, tx_hash STRING, event_index INTEGER, contract_address STRING, event_name STRING, decoded_log OBJECT, full_decoded_log VARIANT, fact_decoded_event_logs_id STRING, inserted_timestamp TIMESTAMP_NTZ, modified_timestamp TIMESTAMP_NTZ)"
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the decoded event logs data for a given block height. If to_latest is true, it will continue fetching logs until the latest block. Otherwise, it will fetch logs until the block height is reached.$$
sql: |
{{ evm_live_view_fact_decoded_event_logs(schema, blockchain, network) | indent(4) -}}

- name: {{ schema -}}.tf_fact_decoded_traces
signature:
- [block_height, INTEGER, The start block height to get the traces from]
- [to_latest, BOOLEAN, Whether to continue fetching traces until the latest block or not]
return_type:
- "TABLE(block_number INTEGER, tx_hash STRING, block_timestamp TIMESTAMP_NTZ, tx_status STRING, tx_position INTEGER, trace_index INTEGER, from_address STRING, to_address STRING, VALUE FLOAT, value_precise_raw STRING, value_precise STRING, gas INTEGER, gas_used INTEGER, TYPE STRING, identifier STRING, sub_traces INTEGER, error_reason STRING, trace_status STRING, input STRING, output STRING, function_name STRING, decoded_input_data VARIANT, decoded_output_data VARIANT, fact_decoded_traces_id STRING, inserted_timestamp TIMESTAMP_NTZ, modified_timestamp TIMESTAMP_NTZ)"
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the decoded traces data for a given block height. If to_latest is true, it will continue fetching traces until the latest block. Otherwise, it will fetch traces until the block height is reached.$$
sql: |
{{ evm_live_view_fact_decoded_traces(schema, blockchain, network) | indent(4) -}}
- name: {{ schema -}}.tf_fact_traces
signature:
- [block_height, INTEGER, The start block height to get the traces from]
- [to_latest, BOOLEAN, Whether to continue fetching traces until the latest block or not]
return_type:
- "TABLE(tx_hash STRING, block_number NUMBER, block_timestamp TIMESTAMP_NTZ(9), from_address STRING, to_address STRING, value FLOAT, value_precise_raw STRING, value_precise STRING, gas NUMBER, gas_used NUMBER, input STRING, output STRING, TYPE STRING, identifier STRING, DATA OBJECT, tx_status STRING, sub_traces NUMBER, trace_status STRING, error_reason STRING, trace_index NUMBER, fact_traces_id STRING, inserted_timestamp TIMESTAMP_NTZ(9), modified_timestamp TIMESTAMP_NTZ(9))"
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the traces for a given block height. If to_latest is true, it will continue fetching traces until the latest block. Otherwise, it will fetch traces until the block height is reached.$$
sql: |
{{ evm_live_view_fact_traces(schema, blockchain, network) | indent(4) -}}

- name: {{ schema -}}.tf_fact_transactions
signature:
- [block_height, INTEGER, The start block height to get the transactions from]
- [to_latest, BOOLEAN, Whether to continue fetching transactions until the latest block or not]
return_type:
- "TABLE(block_number NUMBER, block_timestamp TIMESTAMP_NTZ, block_hash STRING, tx_hash STRING, nonce NUMBER, POSITION NUMBER, origin_function_signature STRING, from_address STRING, to_address STRING, VALUE FLOAT, value_precise_raw STRING, value_precise STRING, tx_fee FLOAT, tx_fee_precise STRING, gas_price FLOAT, gas_limit NUMBER, gas_used NUMBER, cumulative_gas_used NUMBER, input_data STRING, status STRING, effective_gas_price FLOAT, max_fee_per_gas FLOAT, max_priority_fee_per_gas FLOAT, r STRING, s STRING, v STRING, tx_type NUMBER, chain_id NUMBER, blob_versioned_hashes ARRAY, max_fee_per_blob_gas NUMBER, blob_gas_used NUMBER, blob_gas_price NUMBER, fact_transactions_id STRING, inserted_timestamp TIMESTAMP_NTZ, modified_timestamp TIMESTAMP_NTZ)"
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the transactions for a given block height. If to_latest is true, it will continue fetching transactions until the latest block. Otherwise, it will fetch transactions until the block height is reached.$$
sql: |
{{ evm_live_view_fact_transactions(schema, blockchain, network) | indent(4) -}}

- name: {{ schema -}}.tf_fact_token_balances
signature:
- [block_height, INTEGER, The start block height to get the balances from]
- [to_latest, BOOLEAN, Whether to continue fetching balances until the latest block or not]
return_type:
- "TABLE(block_number NUMBER, block_timestamp TIMESTAMP_NTZ, address STRING, contract_address STRING, balance NUMBER(38,0), fact_token_balances_id STRING, inserted_timestamp TIMESTAMP_NTZ, modified_timestamp TIMESTAMP_NTZ)"
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the token balances for a given block height. If to_latest is true, it will continue fetching balances until the latest block. Otherwise, it will fetch balances until the block height is reached.$$
sql: |
{{ evm_live_view_fact_token_balances(schema, blockchain, network) | indent(4) -}}

- name: {{ schema -}}.tf_fact_eth_balances
signature:
- [block_height, INTEGER, The start block height to get the balances from]
- [to_latest, BOOLEAN, Whether to continue fetching balances until the latest block or not]
return_type:
- "TABLE(block_number NUMBER, block_timestamp TIMESTAMP_NTZ, address STRING, balance NUMBER(38,0), fact_eth_balances_id STRING, inserted_timestamp TIMESTAMP_NTZ, modified_timestamp TIMESTAMP_NTZ)"
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the eth balances for a given block height. If to_latest is true, it will continue fetching balances until the latest block. Otherwise, it will fetch balances until the block height is reached.$$
sql: |
{{ evm_live_view_fact_eth_balances(schema, blockchain, network) | indent(4) -}}

- name: {{ schema -}}.tf_ez_decoded_event_logs
signature:
- [block_height, INTEGER, The start block height to get the logs from]
- [to_latest, BOOLEAN, Whether to continue fetching logs until the latest block or not]
return_type:
- "TABLE(block_number INTEGER, block_timestamp TIMESTAMP_NTZ, tx_hash STRING, event_index INTEGER, contract_address STRING, contract_name STRING, event_name STRING, decoded_log OBJECT, full_decoded_log VARIANT, origin_function_signature STRING, origin_from_address STRING, origin_to_address STRING, topics VARIANT, data STRING, event_removed BOOLEAN, tx_status STRING, ez_decoded_event_logs_id STRING, inserted_timestamp TIMESTAMP_NTZ, modified_timestamp TIMESTAMP_NTZ)"
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the ez decoded event logs data for a given block height. If to_latest is true, it will continue fetching logs until the latest block. Otherwise, it will fetch logs until the block height is reached.$$
sql: |
{{ evm_live_view_ez_decoded_event_logs(schema, blockchain, network) | indent(4) -}}

- name: {{ schema -}}.tf_ez_native_transfers
signature:
- [block_height, INTEGER, The start block height to get the transfers from]
- [to_latest, BOOLEAN, Whether to continue fetching transfers until the latest block or not]
return_type:
- "TABLE(tx_hash STRING, block_number NUMBER(38,0), block_timestamp TIMESTAMP_NTZ(9), tx_position NUMBER(38,0), trace_index NUMBER(19,0), identifier STRING, origin_from_address STRING, origin_to_address STRING, origin_function_signature STRING, from_address STRING, to_address STRING, amount FLOAT, amount_precise_raw STRING, amount_precise STRING, amount_usd FLOAT, ez_native_transfers_id STRING, inserted_timestamp TIMESTAMP_NTZ(9), modified_timestamp TIMESTAMP_NTZ(9))"
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the native transfers for a given block height. If to_latest is true, it will continue fetching transfers until the latest block. Otherwise, it will fetch transfers until the block height is reached.$$
sql: |
{{ evm_live_view_ez_native_transfers(schema, blockchain, network) | indent(4) -}}

- name: {{ schema -}}.tf_ez_token_transfers
signature:
- [block_height, INTEGER, The start block height to get the transfers from]
- [to_latest, BOOLEAN, Whether to continue fetching transfers until the latest block or not]
return_type:
- "TABLE(block_number INTEGER, block_timestamp TIMESTAMP_NTZ, tx_hash STRING, event_index INTEGER, origin_function_signature STRING, origin_from_address STRING, origin_to_address STRING, contract_address STRING, from_address STRING, to_address STRING, raw_amount_precise STRING, raw_amount FLOAT, amount_precise FLOAT, amount FLOAT, amount_usd FLOAT, decimals INTEGER, symbol STRING, token_price FLOAT, has_decimal STRING, has_price STRING, _log_id STRING, ez_token_transfers_id STRING, _inserted_timestamp TIMESTAMP_NTZ, inserted_timestamp TIMESTAMP_NTZ, modified_timestamp TIMESTAMP_NTZ)"
options: |
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the token transfers for a given block height. If to_latest is true, it will continue fetching transfers until the latest block. Otherwise, it will fetch transfers until the block height is reached.$$
sql: |
{{ evm_live_view_ez_token_transfers(schema, blockchain, network) | indent(4) -}}

{%- endmacro -%}
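The ez_* variants follow the same calling pattern; a sketch (the `ethereum` schema and the block height are illustrative assumptions):

```sql
-- Hypothetical call: price-enriched token transfers for one block
SELECT tx_hash, contract_address, symbol, amount, amount_usd
FROM TABLE(ethereum.tf_ez_token_transfers(19000000, FALSE));
```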
{% macro config_eth_high_level_abstractions(blockchain, network) -%}
{#
This macro is used to generate high level abstractions for Ethereum mainnet only.
@ -393,7 +553,7 @@
NOT NULL
RETURNS NULL ON NULL INPUT
VOLATILE
COMMENT = $$Returns the decoded events emitted by a contract from a specific block to the latest block. Submit missing ABIs [here](https://science.flipsidecrypto.xyz/abi-requestor/).$$
sql: |
{{ evm_contract_events_decoded(schema, blockchain, network) | indent(4) -}}
{%- endmacro -%}
@ -1,10 +1,10 @@
{% macro evm_latest_native_balance_string(schema, blockchain, network) %}
with base as (select lower(wallet) AS wallet_address)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
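The address check used throughout these macros can be verified in isolation (input values illustrative):

```sql
SELECT REGEXP_LIKE('0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b', '^0x([a-fA-F0-9]{40})$');  -- TRUE
SELECT REGEXP_LIKE('0xnot-an-address', '^0x([a-fA-F0-9]{40})$');                            -- FALSE
```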
@ -33,10 +33,10 @@ node_call AS (
FROM flat_addresses
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -76,11 +76,11 @@ node_call AS (
and blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -118,16 +118,16 @@ final AS (
raw_balance::INT / POW(10, ifnull(decimals,0)) AS balance
FROM
flat_rows
LEFT JOIN {{ ref('_evm__contracts_map') }}
ON token_address = address
and blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -165,16 +165,16 @@ final AS (
raw_balance::INT / POW(10, ifnull(decimals,0)) AS balance
FROM
flat_rows
LEFT JOIN {{ ref('_evm__contracts_map') }}
ON token_address = address
and blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -214,16 +214,16 @@ final AS (
raw_balance::INT / POW(10, ifnull(decimals,0)) AS balance
FROM
flat_rows
LEFT JOIN {{ ref('_evm__contracts_map') }}
ON token_address = address
and blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -262,12 +262,12 @@ WITH inputs AS (
AND blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -316,12 +316,12 @@ final AS (
AND blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -370,12 +370,12 @@ final AS (
AND blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -433,12 +433,12 @@ final AS (
AND blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -488,12 +488,12 @@ final AS (
AND blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -552,12 +552,12 @@ final AS (
AND blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -616,12 +616,12 @@ final AS (
AND blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -689,12 +689,12 @@ final AS (
AND blockchain = '{{blockchain}}'
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and REGEXP_LIKE(token_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -710,11 +710,11 @@ FROM final
{% macro evm_historical_native_balance_si(schema, blockchain, network) %}
with base as (select lower(wallet) AS wallet_address, CONCAT('0x', TRIM(TO_CHAR(block_number, 'XXXXXXXXXX'))) as hex_block)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
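The hex_block conversion above can be sanity-checked on its own (block number illustrative):

```sql
-- 19000000 renders as '0x121EAC0', the quantity format eth_getBalance expects
SELECT CONCAT('0x', TRIM(TO_CHAR(19000000, 'XXXXXXXXXX')));
```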
@ -723,7 +723,7 @@ SELECT
block_number,
utils.udf_hex_to_int({{schema}}.udf_rpc_eth_get_balance(wallet_address,hex_block)::string) AS raw_balance,
(raw_balance / POW(10,18))::float AS balance
FROM base
LEFT JOIN {{ ref('_evm__native_symbol_map') }}
on '{{blockchain}}' = blockchain
and '{{network}}' = network
@ -746,11 +746,11 @@ inputs AS (
FROM blocks
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -782,11 +782,11 @@ inputs AS (
FROM flat_wallets
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -819,11 +819,11 @@ and '{{network}}' = network
FROM flat_wallets
)
SELECT
case
when REGEXP_LIKE(wallet_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(block_number)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -866,10 +866,10 @@ and '{{network}}' = network
LATERAL FLATTEN(input => eth_getLogs)
)
SELECT
case
when REGEXP_LIKE(contract_address, '^0x([a-fA-F0-9]{40})$')
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -910,11 +910,11 @@ and '{{network}}' = network
LATERAL FLATTEN(input => eth_getLogs)
)
SELECT
case
when REGEXP_LIKE(contract_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(lookback)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -958,10 +958,10 @@ and '{{network}}' = network
LATERAL FLATTEN(input => eth_getLogs)
)
SELECT
case
when REGEXP_LIKE(contract_address, '^0x([a-fA-F0-9]{40})$')
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -1005,11 +1005,11 @@ and '{{network}}' = network
LATERAL FLATTEN(input => eth_getLogs)
)
SELECT
case
when REGEXP_LIKE(contract_address, '^0x([a-fA-F0-9]{40})$')
and is_integer(lookback)
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -1125,10 +1125,10 @@ final AS (
transformed
)
SELECT
case
when REGEXP_LIKE(n.contract_address, '^0x([a-fA-F0-9]{40})$') then 'Success'
when f.event_name is null then 'Error - Contract ABI Not Found, submit ABIs [here](https://science.flipsidecrypto.xyz/abi-requestor/)'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -1253,10 +1253,10 @@ final AS (
transformed
)
SELECT
case
when REGEXP_LIKE(n.contract_address, '^0x([a-fA-F0-9]{40})$') and is_integer(n.lookback) then 'Success'
when f.event_name is null then 'Error - Contract ABI Not Found, submit ABIs [here](https://science.flipsidecrypto.xyz/abi-requestor/)'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -1380,10 +1380,10 @@ final AS (
transformed
)
SELECT
case
when REGEXP_LIKE(n.contract_address, '^0x([a-fA-F0-9]{40})$') then 'Success'
when f.event_name is null then 'Error - Contract ABI Not Found, submit ABIs [here](https://science.flipsidecrypto.xyz/abi-requestor/)'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -1510,10 +1510,10 @@ final AS (
transformed
)
SELECT
case
when REGEXP_LIKE(n.contract_address, '^0x([a-fA-F0-9]{40})$') and is_integer(n.lookback) then 'Success'
when f.event_name is null then 'Error - Contract ABI Not Found, submit ABIs [here](https://science.flipsidecrypto.xyz/abi-requestor/)'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -1561,10 +1561,10 @@ and n.event_index = f.event_index
LATERAL FLATTEN(input => eth_getLogs)
)
SELECT
case
when REGEXP_LIKE(contract_address, '^0x([a-fA-F0-9]{40})$')
then 'Success'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -1577,7 +1577,7 @@ and n.event_index = f.event_index
FROM node_flat
UNION ALL
SELECT
'Success' as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
tx_hash,
@ -1588,7 +1588,7 @@ and n.event_index = f.event_index
data as event_data
from {{ ref('_eth__logs') }}
where contract_address = (select contract_address from node_call)
and block_number >= min_block
and block_number <= (select min_block_no from chainhead)
{% endmacro %}

@ -1696,10 +1696,10 @@ final AS (
transformed
)
SELECT
case
when REGEXP_LIKE(n.contract_address, '^0x([a-fA-F0-9]{40})$') and is_integer(min_block) then 'Success'
when f.event_name is null then 'Error - Contract ABI Not Found, submit ABIs [here](https://science.flipsidecrypto.xyz/abi-requestor/)'
else 'Error - Invalid Input'
end as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -1717,7 +1717,7 @@ on n.block_number = f.block_number
and n.tx_hash = f.tx_hash
and n.event_index = f.event_index
union all
select
'Success' as status,
'{{blockchain}}' AS blockchain,
'{{network}}' AS network,
@ -1733,4 +1733,4 @@ from {{ ref('_eth__decoded_logs') }}
where contract_address = (select contract_address from inputs)
and block_number >= min_block
and block_number <= (select min_block_no from chainhead)
{% endmacro %}
macros/evm/evm_live_views.sql (new file, 1638 lines): file diff suppressed because it is too large.
@ -33,7 +33,7 @@

`dbt run-operation apply_grants_to_all_schema`
#}
{% if execute and target.name == "prod" %}
{% if execute and target.name in ("prod", "hosted",) %}
{% set sql_get_schema %}
SELECT SCHEMA_NAME
FROM {{ target.database }}.INFORMATION_SCHEMA.SCHEMATA
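With the widened target check, the same grant sweep can now be run against the hosted environment (a sketch; target names depend on your profiles.yml):

```bash
dbt run-operation apply_grants_to_all_schema --target hosted
```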
macros/livequery/external_access_integrations.sql (new file, 22 lines)
@ -0,0 +1,22 @@
{% macro create_s3_express_external_access_integration() %}
{% set network_rule_sql %}
CREATE NETWORK RULE IF NOT EXISTS s3_express_network_rule
MODE = EGRESS
TYPE = HOST_PORT
VALUE_LIST = (
'*.s3express-use1-az4.us-east-1.amazonaws.com:443',
'*.s3express-use1-az5.us-east-1.amazonaws.com:443',
'*.s3express-use1-az6.us-east-1.amazonaws.com:443'
)
{% endset %}

{% set external_access_sql %}
CREATE EXTERNAL ACCESS INTEGRATION IF NOT EXISTS s3_express_external_access_integration
ALLOWED_NETWORK_RULES = (s3_express_network_rule)
ENABLED = true
{% endset %}

{% do run_query(network_rule_sql) %}
{% do run_query(external_access_sql) %}
{% endmacro %}
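A usage sketch, run once per target environment:

```bash
dbt run-operation create_s3_express_external_access_integration
```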
@ -26,6 +26,32 @@
{% endfor -%}
{%- endmacro -%}

{%- macro format_headers(headers) -%}
{%- if headers -%}
{%- if headers is mapping -%}
{%- set header_items = [] -%}
{%- for key, value in headers.items() -%}
{%- set _ = header_items.append("'" ~ key ~ "' = '" ~ value ~ "'") -%}
{%- endfor -%}
HEADERS = (
{{ header_items | join(',\n ') }}
)
{%- elif headers is iterable -%}
{%- set header_items = [] -%}
{%- for item in headers -%}
{%- if item is mapping -%}
{%- for key, value in item.items() -%}
{%- set _ = header_items.append("'" ~ key ~ "' = '" ~ value ~ "'") -%}
{%- endfor -%}
{%- endif -%}
{%- endfor -%}
HEADERS = (
{{ header_items | join(',\n ') }}
)
{%- endif -%}
{%- endif -%}
{%- endmacro -%}
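For reference, a sketch of what `format_headers` renders for a mapping input (the key/value pair is illustrative):

```sql
-- format_headers({'fsc-quantum-execution-mode': 'async'}) emits:
HEADERS = (
    'fsc-quantum-execution-mode' = 'async'
)
```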
{% macro create_sql_function(
name_,
signature,
@ -33,10 +59,12 @@
sql_,
api_integration = none,
options = none,
func_type = none
func_type = none,
max_batch_rows = none,
headers = none
) %}
CREATE OR REPLACE {{ func_type }} FUNCTION {{ name_ }}(
{{- compile_signature(signature) }}
{{- livequery_models.compile_signature(signature) }}
)
COPY GRANTS
RETURNS {{ return_type }}
@ -44,9 +72,15 @@
{{ options }}
{% endif %}
{%- if api_integration -%}
api_integration = {{ api_integration }}
AS {{ construct_api_route(sql_) ~ ";" }}
{% else -%}
api_integration = {{ api_integration -}}
{%- if max_batch_rows -%}
{{ "\n max_batch_rows = " ~ max_batch_rows -}}
{%- endif -%}
{%- if headers -%}
{{ "\n" ~ livequery_models.format_headers(headers) -}}
{%- endif -%}
{{ "\n AS " ~ livequery_models.construct_api_route(sql_) ~ ";" -}}
{%- else -%}
AS
$$
{{ sql_ }}
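A sketch of the DDL the api_integration branch now renders when both new parameters are set (the function name, integration, and route are illustrative assumptions):

```sql
CREATE OR REPLACE EXTERNAL FUNCTION live.udf_api(...)  -- illustrative signature
COPY GRANTS
RETURNS VARIANT
api_integration = aws_api_integration  -- illustrative
max_batch_rows = 10
HEADERS = (
    'fsc-quantum-execution-mode' = 'async'
)
AS 'https://example.com/udf_api';  -- illustrative route
```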
@ -65,16 +99,19 @@
{% set options = config ["options"] %}
{% set api_integration = config ["api_integration"] %}
{% set func_type = config ["func_type"] %}
{% set max_batch_rows = config ["max_batch_rows"] %}
{% set headers = config ["headers"] %}
{% if not drop_ -%}
{{ create_sql_function(
{{ livequery_models.create_sql_function(
name_ = name_,
signature = signature,
return_type = return_type,
sql_ = sql_,
options = options,
api_integration = api_integration,
func_type = func_type
max_batch_rows = max_batch_rows,
func_type = func_type,
headers = headers
) }}
{%- else -%}
{{ drop_function(
@ -113,7 +150,7 @@
CREATE SCHEMA IF NOT EXISTS {{ schema }};
{%- set configs = fromyaml(config_func(blockchain, network)) if network else fromyaml(config_func(schema, blockchain)) -%}
{%- for udf in configs -%}
{{- create_or_drop_function_from_config(udf, drop_=drop_) -}}
{{- livequery_models.create_or_drop_function_from_config(udf, drop_=drop_) -}}
{%- endfor -%}
{%- endmacro -%}

@ -154,14 +191,14 @@
#}
{% if execute and (var("UPDATE_UDFS_AND_SPS") or var("DROP_UDFS_AND_SPS")) and model.unique_id in selected_resources %}
{% set sql %}
{{- crud_udfs(config, this.schema, var("DROP_UDFS_AND_SPS")) -}}
{{- livequery_models.crud_udfs(config, this.schema, var("DROP_UDFS_AND_SPS")) -}}
{%- endset -%}
{%- if var("DROP_UDFS_AND_SPS") -%}
{%- do log("Drop core udfs: " ~ this.database ~ "." ~ this.schema, true) -%}
{%- else -%}
{%- do log("Deploy core udfs: " ~ this.database ~ "." ~ this.schema, true) -%}
{%- endif -%}
{%- do run_query(sql ~ apply_grants_by_schema(this.schema)) -%}
{%- do run_query(sql ~ livequery_models.apply_grants_by_schema(this.schema)) -%}
{%- endif -%}
SELECT '{{ model.schema }}' as schema_
{%- endmacro -%}
@ -177,7 +214,7 @@
{% if execute and (var("UPDATE_UDFS_AND_SPS") or var("DROP_UDFS_AND_SPS")) and model.unique_id in selected_resources %}
{% set sql %}
{% for config in configs %}
{{- crud_udfs_by_chain(config, blockchain, network, var("DROP_UDFS_AND_SPS")) -}}
{{- livequery_models.crud_udfs_by_chain(config, blockchain, network, var("DROP_UDFS_AND_SPS")) -}}
{%- endfor -%}
{%- endset -%}
{%- if var("DROP_UDFS_AND_SPS") -%}
@ -185,7 +222,7 @@
{%- else -%}
{%- do log("Deploy partner udfs: " ~ this.database ~ "." ~ schema, true) -%}
{%- endif -%}
{%- do run_query(sql ~ apply_grants_by_schema(schema)) -%}
{%- do run_query(sql ~ livequery_models.apply_grants_by_schema(schema)) -%}
{%- endif -%}
SELECT '{{ model.schema }}' as schema_
{%- endmacro -%}
@ -208,7 +245,7 @@
{%- else -%}
{%- do log("Deploy marketplace udfs: " ~ this.database ~ "." ~ schema, true) -%}
{%- endif -%}
{%- do run_query(sql ~ apply_grants_by_schema(schema)) -%}
{%- do run_query(sql ~ livequery_models.apply_grants_by_schema(schema)) -%}
{%- endif -%}
SELECT '{{ model.schema }}' as schema_
{%- endmacro -%}
macros/livequery/udf_compatibility.sql (new file, 63 lines)
@ -0,0 +1,63 @@
{% macro get_streamline_stack_version() -%}
{# Determine the environment based on target.name #}
{% if target.name == 'dev' %}
{% set env = 'stg' %}
{% elif target.name == 'prod' %}
{% set env = 'prod' %}
{% else %}
{% set env = 'stg' %}
{% endif %}

{# Extract database prefix if it follows pattern <database_name>_<target_name> #}
{% set database_parts = target.database.split('_') %}
{% if database_parts|length > 1 and database_parts[-1].lower() == target.name.lower() %}
{% set database_prefix = database_parts[:-1]|join('_') %}
{% else %}
{% set database_prefix = target.database %}
{% endif %}

{% set streamline_stack_version_query %}
SELECT
TAGS:streamline_runtime_version::STRING as runtime_version,
TAGS:streamline_infrastructure_version::STRING as infrastructure_version
FROM TABLE(STREAMLINE.AWS.CLOUDFORMATION_DESCRIBE_STACKS('{{ env }}', '{{ database_prefix.lower() }}-api-{{ env }}'));
{% endset %}

{% if execute %}
{% set result = run_query(streamline_stack_version_query) %}
{% if result.rows|length > 0 %}
{% set runtime_version = result.rows[0][0] %}
{% set infrastructure_version = result.rows[0][1] %}
{% set streamline_stack_version = {'runtime_version': runtime_version, 'infrastructure_version': infrastructure_version} %}
{% else %}
{% set streamline_stack_version = {'runtime_version': false, 'infrastructure_version': false} %}
{% endif %}
{% else %}
{% set streamline_stack_version = {'runtime_version': false, 'infrastructure_version': false} %}
{% endif %}

{{ return(streamline_stack_version) }}
{%- endmacro -%}

{% macro is_udf_api_v2_compatible() -%}
{% set versions = get_streamline_stack_version() %}

{% if execute and versions.runtime_version %}
{# Extract version number from runtime_version string (e.g., "v3.1.2" -> "3.1.2") #}
{% set version_str = versions.runtime_version.replace('v', '') %}
{% set version_parts = version_str.split('.') %}

{# Convert to comparable format: major.minor.patch #}
{% set major = version_parts[0] | int %}
{% set minor = version_parts[1] | int if version_parts|length > 1 else 0 %}
{% set patch = version_parts[2] | int if version_parts|length > 2 else 0 %}

{# Check if version is >= 3.0.0 #}
{% set is_compatible = major >= 3 %}
{% else %}
{% set is_compatible = false %}
{% endif %}

{{ return(is_compatible) }}
{%- endmacro -%}
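A sketch of how the compatibility gate is meant to be used inside a generated function body; this mirrors the Alchemy configs below (`url` stands in for the concatenated endpoint):

```sql
{% if is_udf_api_v2_compatible() -%}
live.udf_api_v2('GET', url, {'fsc-quantum-execution-mode': 'async'}, {}, '_FSC_SYS/ALCHEMY', TRUE)
{%- else -%}
live.udf_api('GET', url, {}, {}, '_FSC_SYS/ALCHEMY')
{%- endif %}
```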
@ -4,7 +4,7 @@

Parameters:
method (string): The JSON RPC method to call.
params (string): The JSON RPC parameters to pass to the method.
params (array): The JSON RPC parameters to pass to the method.
blockchain (string): The blockchain to call the method on.
network (string): The network to call the method on.
Returns:
@ -15,10 +15,10 @@
live.udf_api(
'{endpoint}'
,utils.udf_json_rpc_call({{ method }}, {{ params }})
,concat_ws('/', 'integration', _utils.udf_provider(), {{ blockchain }}, {{ network }})
,concat_ws('/', 'integration', _utils.udf_provider(), '{{ blockchain }}', '{{ network }}')
)::VARIANT:data AS data
)
SELECT
COALESCE(data:result, {'error':data:error})
FROM result
{% endmacro -%}
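For context, `utils.udf_json_rpc_call` wraps the method and params into a JSON-RPC 2.0 body; a sketch (method illustrative, and the shape matches the payload built by the Alchemy RPC UDFs below):

```sql
-- Produces an object shaped like {"jsonrpc": "2.0", "method": "eth_blockNumber", "params": [], ...}
SELECT utils.udf_json_rpc_call('eth_blockNumber', []);
```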
macros/marketplace/alchemy/README.md (new file, 288 lines)
@ -0,0 +1,288 @@
# Alchemy API Integration

Comprehensive blockchain data integration using Alchemy's powerful APIs for NFTs, tokens, transfers, and RPC calls across multiple networks.

## Supported Networks

- **Ethereum** (`eth-mainnet`)
- **Polygon** (`polygon-mainnet`)
- **Arbitrum** (`arb-mainnet`)
- **Optimism** (`opt-mainnet`)
- **Base** (`base-mainnet`)
- **And more** - Check [Alchemy's documentation](https://docs.alchemy.com/reference/api-overview) for the latest supported networks

## Setup

1. Get your Alchemy API key from the [Alchemy Dashboard](https://dashboard.alchemy.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/ALCHEMY`

3. Deploy the Alchemy marketplace functions:
```bash
dbt run --models alchemy__ alchemy_utils__alchemy_utils
```

## Core Functions

### Utility Functions (`alchemy_utils` schema)

#### `alchemy_utils.nfts_get(network, path, query_args)`
Make GET requests to Alchemy NFT API endpoints.

#### `alchemy_utils.nfts_post(network, path, body)`
Make POST requests to Alchemy NFT API endpoints.

#### `alchemy_utils.rpc(network, method, params)`
Make RPC calls to blockchain networks via Alchemy.
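Minimal direct call against the utility layer (the endpoint path and owner address are illustrative; see Alchemy's docs for current paths):

```sql
SELECT alchemy_utils.nfts_get(
    'eth-mainnet',
    '/getNFTs',
    {'owner': '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b'}
);
```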
### NFT Functions (`alchemy` schema)

#### `alchemy.get_nfts_for_owner(network, owner[, query_args])`
Get all NFTs owned by an address.

#### `alchemy.get_nft_metadata(network, contract_address, token_id)`
Get metadata for a specific NFT.

#### `alchemy.get_nfts_for_collection(network, contract_address[, query_args])`
Get all NFTs in a collection.

#### `alchemy.get_owners_for_nft(network, contract_address, token_id)`
Get all owners of a specific NFT.

### Token Functions

#### `alchemy.get_token_balances(network, owner[, contract_addresses])`
Get token balances for an address.

#### `alchemy.get_token_metadata(network, contract_address)`
Get metadata for a token contract.

### Transfer Functions

#### `alchemy.get_asset_transfers(network, query_args)`
Get asset transfer data with flexible filtering.

## Examples

### NFT Queries

#### Get NFTs for Owner
```sql
-- Get all NFTs owned by an address
SELECT alchemy.get_nfts_for_owner(
    'eth-mainnet',
    '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b'
);

-- With pagination and filtering
SELECT alchemy.get_nfts_for_owner(
    'eth-mainnet',
    '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b',
    {
        'pageSize': 100,
        'contractAddresses': ['0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D'] -- BAYC
    }
);
```

#### Get NFT Metadata
```sql
-- Get metadata for a specific NFT
SELECT alchemy.get_nft_metadata(
    'eth-mainnet',
    '0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D', -- BAYC contract
    '1234' -- Token ID
);
```

#### Get Collection NFTs
```sql
-- Get all NFTs in a collection
SELECT alchemy.get_nfts_for_collection(
    'eth-mainnet',
    '0x60E4d786628Fea6478F785A6d7e704777c86a7c6', -- MAYC
    {
        'pageSize': 50,
        'startToken': '0'
    }
);
```

### Token Queries

#### Get Token Balances
```sql
-- Get all token balances for an address
SELECT alchemy.get_token_balances(
    'eth-mainnet',
    '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b'
);

-- Get specific token balances
SELECT alchemy.get_token_balances(
    'eth-mainnet',
    '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b',
    ['0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD'] -- USDC
);
```

#### Get Token Metadata
```sql
-- Get token contract information
SELECT alchemy.get_token_metadata(
    'eth-mainnet',
    '0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD' -- USDC
);
```

### Transfer Analysis

#### Asset Transfers
```sql
-- Get recent transfers for an address
SELECT alchemy.get_asset_transfers(
    'eth-mainnet',
    {
        'fromAddress': '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b',
        'category': ['erc721', 'erc1155'],
        'maxCount': 100
    }
);

-- Get transfers for a specific contract
SELECT alchemy.get_asset_transfers(
    'eth-mainnet',
    {
        'contractAddresses': ['0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D'],
        'category': ['erc721'],
        'fromBlock': '0x12A05F200',
        'toBlock': 'latest'
    }
);
```

### RPC Calls

#### Direct Blockchain Queries
```sql
-- Get latest block number
SELECT alchemy_utils.rpc(
    'eth-mainnet',
    'eth_blockNumber',
    []
);

-- Get block by number
SELECT alchemy_utils.rpc(
    'eth-mainnet',
    'eth_getBlockByNumber',
    ['0x12A05F200', true]
);

-- Get transaction receipt
SELECT alchemy_utils.rpc(
    'eth-mainnet',
    'eth_getTransactionReceipt',
    ['0x1234567890abcdef...']
);
```

### Multi-Network Analysis

#### Compare NFT Holdings Across Networks
```sql
-- Get NFT holdings on Ethereum
WITH eth_nfts AS (
    SELECT 'ethereum' as network, alchemy.get_nfts_for_owner(
        'eth-mainnet',
        '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b'
    ) as nfts
),
-- Get NFTs on Polygon
polygon_nfts AS (
    SELECT 'polygon' as network, alchemy.get_nfts_for_owner(
        'polygon-mainnet',
        '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b'
    ) as nfts
)
SELECT network, nfts:totalCount::INTEGER as nft_count
FROM eth_nfts
UNION ALL
SELECT network, nfts:totalCount::INTEGER
FROM polygon_nfts;
```

### Advanced Analytics

#### NFT Floor Price Tracking
```sql
-- Track collection stats over time
WITH collection_data AS (
    SELECT alchemy.get_nfts_for_collection(
        'eth-mainnet',
        '0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D', -- BAYC
        {'pageSize': 1}
    ) as collection_info
)
SELECT
    collection_info:contract:name::STRING as collection_name,
    collection_info:contract:totalSupply::INTEGER as total_supply,
    CURRENT_TIMESTAMP as snapshot_time
FROM collection_data;
```

## Error Handling

Handle API errors and rate limits:

```sql
WITH api_response AS (
    SELECT alchemy.get_nfts_for_owner(
        'eth-mainnet',
        '0xinvalid-address'
    ) as response
)
SELECT
    CASE
        WHEN response:error IS NOT NULL THEN
            CONCAT('API Error: ', response:error:message::STRING)
        WHEN response:ownedNfts IS NOT NULL THEN
            CONCAT('Success: Found ', ARRAY_SIZE(response:ownedNfts), ' NFTs')
        ELSE
            'Unexpected response format'
    END as result
FROM api_response;
```

## Rate Limiting

Alchemy API has the following rate limits:
- **Free tier**: 300 requests per second
- **Growth tier**: 660 requests per second
- **Scale tier**: Custom limits

The functions automatically handle rate limiting through Livequery's retry mechanisms.

## Best Practices

1. **Use pagination**: For large datasets, use `pageSize` and pagination tokens
2. **Filter requests**: Use `contractAddresses` to limit scope when possible
3. **Cache results**: Store frequently accessed data in tables
4. **Monitor usage**: Track API calls to stay within limits
5. **Network selection**: Choose the most relevant network for your use case

## Supported Categories

For asset transfers, use these categories:
- `erc20` - ERC-20 token transfers
- `erc721` - NFT transfers
- `erc1155` - Multi-token standard transfers
- `internal` - Internal ETH transfers
- `external` - External ETH transfers

## API Documentation

- [Alchemy API Reference](https://docs.alchemy.com/reference/api-overview)
- [NFT API](https://docs.alchemy.com/reference/nft-api-quickstart)
- [Token API](https://docs.alchemy.com/reference/token-api-quickstart)
- [Enhanced API Methods](https://docs.alchemy.com/reference/enhanced-api-quickstart)
@ -2,10 +2,14 @@
SELECT {{ schema_name -}}.nfts_get(NETWORK, '/{{api_path}}', QUERY_ARGS) as response
{% endmacro %}

{% macro alchemy_nft_get_api_call_version(schema_name, api_path, version) %}
SELECT {{ schema_name -}}.nfts_get(NETWORK, '{{version}}', '/{{api_path}}', QUERY_ARGS) as response
{% endmacro %}

{% macro alchemy_nft_post_api_call(schema_name, api_path) %}
SELECT {{ schema_name -}}.nfts_post(NETWORK, '/{{api_path}}', BODY) as response
{% endmacro %}

{% macro alchemy_rpc_call(schema_name, method) %}
SELECT {{ schema_name -}}.rpc(NETWORK, '{{method}}', PARAMS) as response
{% endmacro %}

@ -111,7 +111,7 @@
- "VARIANT"
options: |
COMMENT = $$Triggers metadata refresh for an entire NFT collection and refreshes stale metadata after a collection reveal/collection changes. [Alchemy docs here](https://docs.alchemy.com/reference/reingestcontract).$$
sql: {{ alchemy_nft_get_api_call(utils_schema_name, 'reingestContract') | trim }}
sql: {{ alchemy_nft_get_api_call_version(utils_schema_name, 'invalidateContract', 'v3') | trim }}

- name: {{ schema_name -}}.search_contract_metadata
signature:
@ -213,4 +213,4 @@
COMMENT = $$Generate a summary of attribute prevalence for an NFT collection. [Alchemy docs here](https://docs.alchemy.com/reference/summarizenftattributes).$$
sql: {{ alchemy_nft_get_api_call(utils_schema_name, 'summarizeNFTAttributes') | trim }}

{% endmacro %}
@ -3,7 +3,7 @@
This macro is used to generate the alchemy base endpoints
#}

- name: {{ schema -}}.nfts_get
- name: {{ schema_name }}.nfts_get
signature:
- [NETWORK, STRING, The blockchain/network]
- [PATH, STRING, The path starting with '/']
@ -14,15 +14,73 @@
COMMENT = $$Used to issue a 'GET' request to the Alchemy NFT API.$$
sql: |
SELECT
live.udf_api(
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'GET',
concat(
'https://', NETWORK,'.g.alchemy.com/nft/v2/{',NETWORK,'}', PATH, '?',
utils.udf_object_to_url_query_string(QUERY_ARGS)
),
{'fsc-quantum-execution-mode': 'async'},
{},
'_FSC_SYS/ALCHEMY',
TRUE
)
{%- else -%}
live.udf_api(
'GET',
concat(
'https://', NETWORK,'.g.alchemy.com/nft/v2/{',NETWORK,'}', PATH, '?',
utils.udf_object_to_url_query_string(QUERY_ARGS)
),
{},
{},
'_FSC_SYS/ALCHEMY'
) as response
)
{%- endif %}
as response

- name: {{ schema -}}.nfts_post
- name: {{ schema_name }}.nfts_get
signature:
- [NETWORK, STRING, The blockchain/network]
- [VERSION, STRING, The version of the API to use]
- [PATH, STRING, The path starting with '/']
- [QUERY_ARGS, OBJECT, The query arguments]
return_type:
- "VARIANT"
options: |
COMMENT = $$Used to issue a 'GET' request to the Alchemy NFT API.$$
sql: |
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'GET',
concat(
'https://', NETWORK,'.g.alchemy.com/nft/', VERSION, '/{',NETWORK,'}', PATH, '?',
utils.udf_object_to_url_query_string(QUERY_ARGS)
),
{'fsc-quantum-execution-mode': 'async'},
{},
'_FSC_SYS/ALCHEMY',
TRUE
)
{%- else -%}
live.udf_api(
'GET',
concat(
'https://', NETWORK,'.g.alchemy.com/nft/', VERSION, '/{',NETWORK,'}', PATH, '?',
utils.udf_object_to_url_query_string(QUERY_ARGS)
),
{},
{},
'_FSC_SYS/ALCHEMY'
)
{%- endif %}
as response

- name: {{ schema_name }}.nfts_post
signature:
- [NETWORK, STRING, The blockchain/network]
- [PATH, STRING, The path starting with '/']
@ -33,15 +91,28 @@
COMMENT = $$Used to issue a 'POST' request to the Alchemy NFT API.$$
sql: |
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'POST',
concat('https://', NETWORK,'.g.alchemy.com/nft/v2/{',NETWORK,'}', PATH),
{'fsc-quantum-execution-mode': 'async'},
BODY,
'_FSC_SYS/ALCHEMY',
TRUE
)
{%- else -%}
live.udf_api(
'POST',
concat('https://', NETWORK,'.g.alchemy.com/nft/v2/{',NETWORK,'}', PATH),
{},
BODY,
'_FSC_SYS/ALCHEMY'
) as response
)
{%- endif %}
as response

- name: {{ schema -}}.rpc
- name: {{ schema_name }}.rpc
signature:
- [NETWORK, STRING, The blockchain/network]
- [METHOD, STRING, The RPC method to call]
@ -51,10 +122,25 @@
options: |
COMMENT = $$Used to issue an RPC call to Alchemy.$$
sql: |
SELECT live.udf_api(
'POST',
concat('https://', NETWORK,'.g.alchemy.com/v2/{',NETWORK,'}'),
{},
{'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS},
'_FSC_SYS/ALCHEMY') as response
{% endmacro %}
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'POST',
concat('https://', NETWORK,'.g.alchemy.com/v2/{',NETWORK,'}'),
{'fsc-quantum-execution-mode': 'async'},
{'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS},
'_FSC_SYS/ALCHEMY',
TRUE
)
{%- else -%}
live.udf_api(
'POST',
concat('https://', NETWORK,'.g.alchemy.com/v2/{',NETWORK,'}'),
{},
{'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS},
'_FSC_SYS/ALCHEMY'
)
{%- endif %}
as response
{% endmacro %}
|
||||
36
macros/marketplace/allday/README.md
Normal file
36
macros/marketplace/allday/README.md
Normal file
@ -0,0 +1,36 @@
|
||||
# NBA All Day API Integration
|
||||
|
||||
NBA All Day is Dapper Labs' basketball NFT platform, offering officially licensed NBA Moments as digital collectibles.
|
||||
|
||||
## Setup
|
||||
|
||||
1. Get your NBA All Day API key from [Dapper Labs developer portal](https://developers.dapperlabs.com/)
|
||||
|
||||
2. Store the API key in Snowflake secrets under `_FSC_SYS/ALLDAY`
|
||||
|
||||
3. Deploy the All Day marketplace functions:
|
||||
```bash
|
||||
dbt run --models allday__ allday_utils__allday_utils
|
||||
```
|
||||
|
||||
## Functions
|
||||
|
||||
### `allday.get(path, query_args)`
|
||||
Make GET requests to NBA All Day API endpoints.
|
||||
|
||||
## Examples
|
||||
|
||||
```sql
|
||||
-- Get NBA All Day collections
|
||||
SELECT allday.get('/collections', {});
|
||||
|
||||
-- Get specific moment details
|
||||
SELECT allday.get('/moments/12345', {});
|
||||
|
||||
-- Search for moments by player
|
||||
SELECT allday.get('/moments', {'player_id': 'lebron-james'});
|
||||
```
|
||||
|
||||
## API Documentation
|
||||
|
||||
- [NBA All Day API Documentation](https://developers.dapperlabs.com/)
|
||||
39
macros/marketplace/apilayer/README.md
Normal file
39
macros/marketplace/apilayer/README.md
Normal file
@ -0,0 +1,39 @@
# API Layer Integration

API Layer provides a comprehensive suite of APIs including currency conversion, geolocation, weather data, and other utility services.

## Setup

1. Get your API Layer API key from [API Layer Dashboard](https://apilayer.com/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/APILAYER`

3. Deploy the API Layer marketplace functions:
```bash
dbt run --models apilayer__ apilayer_utils__apilayer_utils
```

## Functions

### `apilayer.get(path, query_args)`
Make GET requests to API Layer API endpoints.

### `apilayer.post(path, body)`
Make POST requests to API Layer API endpoints.

## Examples

```sql
-- Get currency exchange rates
SELECT apilayer.get('/exchangerates_data/latest', {'base': 'USD', 'symbols': 'EUR,GBP,JPY'});

-- Get IP geolocation data
SELECT apilayer.get('/ip_api/check', {'ip': '8.8.8.8'});

-- Validate email address
SELECT apilayer.get('/email_validation/check', {'email': 'test@example.com'});
```

## API Documentation

- [API Layer Documentation](https://apilayer.com/marketplace)
@ -17,7 +17,7 @@
      'GET',
      concat('https://api.apilayer.com', PATH, '?', utils.udf_object_to_url_query_string(QUERY_ARGS)),
      {'apikey': '{API_KEY}'},
      {},
      NULL,
      '_FSC_SYS/APILAYER'
    ) as response

@ -39,4 +39,4 @@
      '_FSC_SYS/APILAYER'
    ) as response

{% endmacro %}
{% endmacro %}
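For context, a minimal sketch of the URL construction this GET helper performs. It assumes `utils.udf_object_to_url_query_string` renders `'&'`-joined `key=value` pairs, which is inferred from its name and usage here rather than confirmed by this diff:

```sql
-- Sketch: what the concat(...) above evaluates to for a sample call.
-- The rendered query string is an assumption about the helper's output.
SELECT concat(
    'https://api.apilayer.com',
    '/exchangerates_data/latest',
    '?',
    'base=USD&symbols=EUR,GBP'   -- assumed render of {'base': 'USD', 'symbols': 'EUR,GBP'}
) AS url;
-- => https://api.apilayer.com/exchangerates_data/latest?base=USD&symbols=EUR,GBP
```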
39
macros/marketplace/binance/README.md
Normal file
39
macros/marketplace/binance/README.md
Normal file
@ -0,0 +1,39 @@
# Binance API Integration

Binance is the world's largest cryptocurrency exchange by trading volume, providing access to spot trading, futures, and market data.

## Setup

1. Get your Binance API key from [Binance API Management](https://www.binance.com/en/my/settings/api-management)

2. Store the API key in Snowflake secrets under `_FSC_SYS/BINANCE`

3. Deploy the Binance marketplace functions:
```bash
dbt run --models binance__ binance_utils__binance_utils
```

## Functions

### `binance.get(path, query_args)`
Make GET requests to Binance API endpoints.

### `binance.post(path, body)`
Make POST requests to Binance API endpoints.

## Examples

```sql
-- Get current Bitcoin price
SELECT binance.get('/api/v3/ticker/price', {'symbol': 'BTCUSDT'});

-- Get 24hr ticker statistics
SELECT binance.get('/api/v3/ticker/24hr', {'symbol': 'ETHUSDT'});

-- Get order book depth
SELECT binance.get('/api/v3/depth', {'symbol': 'ADAUSDT', 'limit': 100});
```

## API Documentation

- [Binance API Documentation](https://binance-docs.github.io/apidocs/spot/en/)
45
macros/marketplace/bitquery/README.md
Normal file
45
macros/marketplace/bitquery/README.md
Normal file
@ -0,0 +1,45 @@
# Bitquery API Integration

Bitquery provides GraphQL APIs for blockchain data across multiple networks including Bitcoin, Ethereum, Binance Smart Chain, and many others.

## Setup

1. Get your Bitquery API key from [Bitquery IDE](https://ide.bitquery.io/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/BITQUERY`

3. Deploy the Bitquery marketplace functions:
```bash
dbt run --models bitquery__ bitquery_utils__bitquery_utils
```

## Functions

### `bitquery.get(path, query_args)`
Make GET requests to Bitquery API endpoints.

### `bitquery.post(path, body)`
Make POST requests to Bitquery API endpoints for GraphQL queries.

## Examples

```sql
-- Get Ethereum DEX trades
SELECT bitquery.post('/graphql', {
    'query': 'query { ethereum { dexTrades(date: {since: "2023-01-01"}) { count } } }'
});

-- Get Bitcoin transactions
SELECT bitquery.post('/graphql', {
    'query': 'query { bitcoin { transactions(date: {since: "2023-01-01"}) { count } } }'
});

-- Get token transfers on BSC
SELECT bitquery.post('/graphql', {
    'query': 'query { ethereum(network: bsc) { transfers(date: {since: "2023-01-01"}) { count } } }'
});
```

## API Documentation

- [Bitquery API Documentation](https://docs.bitquery.io/)
39
macros/marketplace/blockpour/README.md
Normal file
39
macros/marketplace/blockpour/README.md
Normal file
@ -0,0 +1,39 @@
# Blockpour API Integration

Blockpour provides blockchain infrastructure and data services with high-performance APIs for accessing on-chain data.

## Setup

1. Get your Blockpour API key from [Blockpour Dashboard](https://blockpour.com/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/BLOCKPOUR`

3. Deploy the Blockpour marketplace functions:
```bash
dbt run --models blockpour__ blockpour_utils__blockpour_utils
```

## Functions

### `blockpour.get(path, query_args)`
Make GET requests to Blockpour API endpoints.

### `blockpour.post(path, body)`
Make POST requests to Blockpour API endpoints.

## Examples

```sql
-- Get latest block information
SELECT blockpour.get('/api/v1/blocks/latest', {});

-- Get transaction details
SELECT blockpour.get('/api/v1/transactions/0x...', {});

-- Get token balances for an address
SELECT blockpour.get('/api/v1/addresses/0x.../tokens', {});
```

## API Documentation

- [Blockpour API Documentation](https://docs.blockpour.com/)
39
macros/marketplace/chainbase/README.md
Normal file
39
macros/marketplace/chainbase/README.md
Normal file
@ -0,0 +1,39 @@
# Chainbase API Integration

Chainbase provides comprehensive blockchain data infrastructure with APIs for accessing multi-chain data, NFTs, and DeFi protocols.

## Setup

1. Get your Chainbase API key from [Chainbase Console](https://console.chainbase.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/CHAINBASE`

3. Deploy the Chainbase marketplace functions:
```bash
dbt run --models chainbase__ chainbase_utils__chainbase_utils
```

## Functions

### `chainbase.get(path, query_args)`
Make GET requests to Chainbase API endpoints.

### `chainbase.post(path, body)`
Make POST requests to Chainbase API endpoints.

## Examples

```sql
-- Get token metadata
SELECT chainbase.get('/v1/token/metadata', {'chain_id': 1, 'contract_address': '0x...'});

-- Get NFT collections
SELECT chainbase.get('/v1/nft/collections', {'chain_id': 1, 'page': 1, 'limit': 20});

-- Get account token balances
SELECT chainbase.get('/v1/account/tokens', {'chain_id': 1, 'address': '0x...', 'limit': 20});
```

## API Documentation

- [Chainbase API Documentation](https://docs.chainbase.com/)
54
macros/marketplace/chainstack/README.md
Normal file
54
macros/marketplace/chainstack/README.md
Normal file
@ -0,0 +1,54 @@
# Chainstack API Integration

Chainstack provides managed blockchain infrastructure with high-performance nodes and APIs for multiple blockchain networks.

## Setup

1. Get your Chainstack API key from [Chainstack Console](https://console.chainstack.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/CHAINSTACK`

3. Deploy the Chainstack marketplace functions:
```bash
dbt run --models chainstack__ chainstack_utils__chainstack_utils
```

## Functions

### `chainstack.get(path, query_args)`
Make GET requests to Chainstack API endpoints.

### `chainstack.post(path, body)`
Make POST requests to Chainstack API endpoints.

## Examples

```sql
-- Get latest block number
SELECT chainstack.post('/rpc', {
    'jsonrpc': '2.0',
    'method': 'eth_blockNumber',
    'params': [],
    'id': 1
});

-- Get account balance
SELECT chainstack.post('/rpc', {
    'jsonrpc': '2.0',
    'method': 'eth_getBalance',
    'params': ['0x...', 'latest'],
    'id': 1
});

-- Get transaction receipt
SELECT chainstack.post('/rpc', {
    'jsonrpc': '2.0',
    'method': 'eth_getTransactionReceipt',
    'params': ['0x...'],
    'id': 1
});
```

## API Documentation

- [Chainstack API Documentation](https://docs.chainstack.com/)
179
macros/marketplace/claude/README.md
Normal file
179
macros/marketplace/claude/README.md
Normal file
@ -0,0 +1,179 @@
# Claude API Integration

Anthropic's Claude AI integration for sophisticated text analysis, content generation, and reasoning tasks. This integration provides access to Claude's advanced language models through Snowflake UDFs.

## Available Models

- **Claude 3.5 Sonnet**: Latest and most capable model for complex tasks
- **Claude 3 Opus**: Powerful model for demanding use cases
- **Claude 3 Sonnet**: Balanced performance and speed
- **Claude 3 Haiku**: Fast and efficient for simple tasks

Check [Anthropic's documentation](https://docs.anthropic.com/claude/docs/models-overview) for the latest available models.

## Setup

1. Get your Claude API key from [Anthropic Console](https://console.anthropic.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/CLAUDE`

3. Deploy the Claude marketplace functions:
```bash
dbt run --models claude__ claude_utils__claude_utils
```

## Functions

### `claude_utils.post(path, body)`
Make POST requests to Claude API endpoints.

### `claude_utils.get(path)`
Make GET requests to Claude API endpoints.

### `claude_utils.delete_method(path)`
Make DELETE requests to Claude API endpoints.

### `claude.chat_completions(messages[, model, max_tokens, temperature])`
Send messages to Claude for chat completion.

### `claude.extract_response_text(claude_response)`
Extract text content from Claude API responses.

## Examples

### Basic Chat
```sql
-- Simple conversation with Claude
SELECT claude.chat_completions([
    {'role': 'user', 'content': 'Explain quantum computing in simple terms'}
]);
```

### Chat with System Prompt
```sql
-- Chat with system message and conversation history
SELECT claude.chat_completions([
    {'role': 'system', 'content': 'You are a helpful data analyst.'},
    {'role': 'user', 'content': 'How do I optimize this SQL query?'},
    {'role': 'assistant', 'content': 'I can help you optimize your SQL query...'},
    {'role': 'user', 'content': 'SELECT * FROM large_table WHERE date > "2023-01-01"'}
]);
```

### Text Analysis
```sql
-- Analyze text sentiment and themes
SELECT claude.chat_completions([
    {'role': 'user', 'content': 'Analyze the sentiment and key themes in this customer feedback: "The product is okay but customer service was terrible. Took forever to get help."'}
]);
```

### Code Generation
```sql
-- Generate Python code
SELECT claude.chat_completions([
    {'role': 'user', 'content': 'Write a Python function to calculate the moving average of a list of numbers'}
]);
```

### Extract Response Text
```sql
-- Get just the text content from Claude's response
WITH claude_response AS (
    SELECT claude.chat_completions([
        {'role': 'user', 'content': 'What is machine learning?'}
    ]) as response
)
SELECT claude.extract_response_text(response) as answer
FROM claude_response;
```

### Batch Text Processing
```sql
-- Process multiple texts
WITH texts AS (
    SELECT * FROM VALUES
        ('Great product, highly recommend!'),
        ('Terrible experience, would not buy again'),
        ('Average quality, nothing special')
    AS t(feedback)
)
SELECT
    feedback,
    claude.extract_response_text(
        claude.chat_completions([
            {'role': 'user', 'content': CONCAT('Analyze sentiment (positive/negative/neutral): ', feedback)}
        ])
    ) as sentiment
FROM texts;
```

### Different Models
```sql
-- Use specific Claude model
SELECT claude.chat_completions(
    [{'role': 'user', 'content': 'Write a complex analysis of market trends'}],
    'claude-3-opus-20240229', -- Use Opus for complex reasoning
    2000, -- max_tokens
    0.3 -- temperature
);
```

## Integration with GitHub Actions

This Claude integration is used by the GitHub Actions failure analysis system:

```sql
-- Analyze GitHub Actions failures with Claude
SELECT claude.extract_response_text(
    claude.chat_completions([
        {'role': 'user', 'content': CONCAT(
            'Analyze this CI/CD failure and provide root cause analysis: ',
            error_logs
        )}
    ])
) as ai_analysis
FROM github_failures;
```

## Error Handling

Check for errors in Claude responses:

```sql
WITH response AS (
    SELECT claude.chat_completions([
        {'role': 'user', 'content': 'Hello Claude'}
    ]) as result
)
SELECT
    CASE
        WHEN result:error IS NOT NULL THEN result:error:message::STRING
        ELSE claude.extract_response_text(result)
    END as final_response
FROM response;
```

## Best Practices

1. **Use appropriate models**: Haiku for simple tasks, Opus for complex reasoning
2. **Set token limits**: Control costs with reasonable `max_tokens` values
3. **Temperature control**: Lower values (0.1-0.3) for factual tasks, higher (0.7-1.0) for creative tasks
4. **Context management**: Include relevant conversation history for better responses
5. **Error handling**: Always check for API errors in responses

## Rate Limiting

Claude API has usage limits based on your plan. The functions automatically handle rate limiting through Livequery's retry mechanisms.

## Security

- API keys are securely stored in Snowflake secrets
- All communication uses HTTPS encryption
- No sensitive data is logged or cached

## API Documentation

- [Claude API Reference](https://docs.anthropic.com/claude/reference/getting-started-with-the-api)
- [Model Comparison](https://docs.anthropic.com/claude/docs/models-overview)
- [Usage Guidelines](https://docs.anthropic.com/claude/docs/use-case-guides)
18
macros/marketplace/claude/claude_config_utils.sql
Normal file
18
macros/marketplace/claude/claude_config_utils.sql
Normal file
@ -0,0 +1,18 @@
{% macro claude_get_api_call(schema_name, api_path) %}
SELECT {{ schema_name }}.get(
    '{{ api_path }}'
) as response
{% endmacro %}

{% macro claude_post_api_call(schema_name, api_path, body) %}
SELECT {{ schema_name }}.post(
    '{{ api_path }}',
    {{ body }}
) as response
{% endmacro %}

{% macro claude_delete_api_call(schema_name, api_path) %}
SELECT {{ schema_name }}.delete_method(
    '{{ api_path }}'
) as response
{% endmacro %}
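To make these Jinja helpers concrete, a minimal sketch of what `claude_post_api_call` might render to once compiled. The argument values (`claude_utils`, the path, and the body payload) are assumptions for illustration, not taken from the file above:

```sql
-- Hypothetical render of:
--   {{ claude_post_api_call('claude_utils', '/v1/messages', body) }}
-- with an illustrative body payload.
SELECT claude_utils.post(
    '/v1/messages',
    {
        'model': 'claude-3-5-sonnet-20241022',
        'messages': [{'role': 'user', 'content': 'Hello'}],
        'max_tokens': 256
    }
) as response
```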
112
macros/marketplace/claude/messages_batch_udfs.yaml.sql
Normal file
112
macros/marketplace/claude/messages_batch_udfs.yaml.sql
Normal file
@ -0,0 +1,112 @@
{% macro config_claude_messages_batch_udfs(schema_name = "claude", utils_schema_name = "claude_utils") -%}
{#
  This macro is used to generate API calls to Claude API endpoints
#}

{# Claude API Messages Batch #}
- name: {{ schema_name -}}.post_messages_batch
  signature:
    - [MESSAGES, OBJECT, Object containing an array of message request objects]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Send a batch of messages to Claude and get responses [API docs: Messages Batch](https://docs.anthropic.com/en/api/creating-message-batches)$$
  sql: |
    SELECT claude_utils.post_api(
      '/v1/messages/batches',
      MESSAGES
    ) as response

{# Claude API Messages Batch Operations #}
- name: {{ schema_name -}}.get_message_batch
  signature:
    - [MESSAGE_BATCH_ID, STRING, ID of the Message Batch to retrieve]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Retrieve details of a specific Message Batch [API docs: Retrieve Message Batch](https://docs.anthropic.com/en/api/retrieving-message-batches)$$
  sql: |
    SELECT claude_utils.get_api(
      CONCAT('/v1/messages/batches/', MESSAGE_BATCH_ID)
    ) as response

- name: {{ schema_name -}}.get_message_batch_results
  signature:
    - [MESSAGE_BATCH_ID, STRING, ID of the Message Batch to retrieve results for]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Retrieve results of a Message Batch [API docs: Retrieve Message Batch Results](https://docs.anthropic.com/en/api/retrieving-message-batches)$$
  sql: |
    SELECT claude_utils.get_api(
      CONCAT('/v1/messages/batches/', MESSAGE_BATCH_ID, '/results')
    ) as response

- name: {{ schema_name -}}.list_message_batches
  signature: []
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$List all Message Batches [API docs: List Message Batches](https://docs.anthropic.com/en/api/retrieving-message-batches)$$
  sql: |
    SELECT claude_utils.get_api(
      '/v1/messages/batches'
    ) as response

- name: {{ schema_name -}}.list_message_batches_with_before
  signature:
    - [BEFORE_ID, STRING, ID of the Message Batch to start listing from]
    - [LIMIT, INTEGER, Maximum number of Message Batches to return]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$List all Message Batches [API docs: List Message Batches](https://docs.anthropic.com/en/api/retrieving-message-batches)$$
  sql: |
    SELECT claude_utils.get_api(
      CONCAT('/v1/messages/batches',
        '?before_id=', COALESCE(BEFORE_ID, ''),
        '&limit=', COALESCE(LIMIT::STRING, '')
      )
    ) as response

- name: {{ schema_name -}}.list_message_batches_with_after
  signature:
    - [AFTER_ID, STRING, ID of the Message Batch to start listing from]
    - [LIMIT, INTEGER, Maximum number of Message Batches to return]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$List all Message Batches [API docs: List Message Batches](https://docs.anthropic.com/en/api/retrieving-message-batches)$$
  sql: |
    SELECT claude_utils.get_api(
      CONCAT('/v1/messages/batches',
        '?after_id=', COALESCE(AFTER_ID, ''),
        '&limit=', COALESCE(LIMIT::STRING, '')
      )
    ) as response

- name: {{ schema_name -}}.cancel_message_batch
  signature:
    - [MESSAGE_BATCH_ID, STRING, ID of the Message Batch to cancel]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Cancel a Message Batch [API docs: Cancel Message Batch](https://docs.anthropic.com/en/api/retrieving-message-batches)$$
  sql: |
    SELECT claude_utils.post_api(
      CONCAT('/v1/messages/batches/', MESSAGE_BATCH_ID, '/cancel'),
      {}
    ) as response

- name: {{ schema_name -}}.delete_message_batch
  signature:
    - [MESSAGE_BATCH_ID, STRING, ID of the Message Batch to delete]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Delete a Message Batch [API docs: Delete Message Batch](https://docs.anthropic.com/en/api/retrieving-message-batches)$$
  sql: |
    SELECT claude_utils.delete_method(
      CONCAT('/v1/messages/batches/', MESSAGE_BATCH_ID)
    ) as response

{% endmacro %}
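A hedged usage sketch for the batch UDFs above: the submit-then-poll flow follows Anthropic's batch API, but the `requests`/`custom_id` body shape and the batch id shown are assumptions drawn from that API's documentation, not from this file.

```sql
-- Submit a batch (body shape per Anthropic's batch API; values are
-- illustrative assumptions).
SELECT claude.post_messages_batch({
    'requests': [
        {
            'custom_id': 'req-1',
            'params': {
                'model': 'claude-3-5-sonnet-20241022',
                'max_tokens': 256,
                'messages': [{'role': 'user', 'content': 'Summarize batch processing.'}]
            }
        }
    ]
}) AS submit_response;

-- Later, poll status and fetch results with the id returned above
-- ('msgbatch_abc123' is a hypothetical placeholder).
SELECT claude.get_message_batch('msgbatch_abc123') AS status;
SELECT claude.get_message_batch_results('msgbatch_abc123') AS results;
```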
87
macros/marketplace/claude/messages_udfs.yaml.sql
Normal file
87
macros/marketplace/claude/messages_udfs.yaml.sql
Normal file
@ -0,0 +1,87 @@
{% macro config_claude_messages_udfs(schema_name = "claude", utils_schema_name = "claude_utils") -%}
{#
  This macro is used to generate API calls to Claude API endpoints
#}

{# Claude API Messages #}
- name: {{ schema_name -}}.post_messages
  signature:
    - [MESSAGES, ARRAY, Array of message objects]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Send a message to Claude and get a response [API docs: Messages](https://docs.anthropic.com/claude/reference/messages_post)$$
  sql: |
    SELECT claude_utils.post_api(
      '/v1/messages',
      {
        'model': 'claude-3-5-sonnet-20241022',
        'messages': MESSAGES,
        'max_tokens': 4096
      }
    ) as response

- name: {{ schema_name -}}.post_messages
  signature:
    - [MODEL, STRING, The model to use (e.g. 'claude-3-opus-20240229')]
    - [MESSAGES, ARRAY, Array of message objects]
    - [MAX_TOKENS, INTEGER, Maximum number of tokens to generate]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Send a message to Claude and get a response [API docs: Messages](https://docs.anthropic.com/claude/reference/messages_post)$$
  sql: |
    SELECT claude_utils.post_api(
      '/v1/messages',
      {
        'model': COALESCE(MODEL, 'claude-3-5-sonnet-20241022'),
        'messages': MESSAGES,
        'max_tokens': COALESCE(MAX_TOKENS, 1024)
      }
    ) as response

- name: {{ schema_name -}}.post_messages
  signature:
    - [MODEL, STRING, The model to use (e.g. 'claude-3-opus-20240229')]
    - [MESSAGES, ARRAY, Array of message objects]
    - [MAX_TOKENS, INTEGER, Maximum number of tokens to generate]
    - [TEMPERATURE, FLOAT, Temperature for sampling (0-1)]
    - [TOP_K, INTEGER, Top K for sampling]
    - [TOP_P, FLOAT, Top P for sampling]
    - [SYSTEM, STRING, System prompt to use]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Send a message to Claude and get a response [API docs: Messages](https://docs.anthropic.com/claude/reference/messages_post)$$
  sql: |
    SELECT claude_utils.post_api(
      '/v1/messages',
      {
        'model': MODEL,
        'messages': MESSAGES,
        'max_tokens': MAX_TOKENS,
        'temperature': TEMPERATURE,
        'top_k': TOP_K,
        'top_p': TOP_P,
        'system': SYSTEM
      }
    ) as response

- name: {{ schema_name -}}.count_message_tokens
  signature:
    - [MODEL, STRING, The model to use (e.g. 'claude-3-5-sonnet-20241022')]
    - [MESSAGES, ARRAY, Array of message objects]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Count tokens in a message array before sending to Claude [API docs: Count Tokens](https://docs.anthropic.com/claude/reference/counting-tokens)$$
  sql: |
    SELECT claude_utils.post_api(
      '/v1/messages/count_tokens',
      {
        'model': COALESCE(MODEL, 'claude-3-5-sonnet-20241022'),
        'messages': MESSAGES
      }
    ) as response

{% endmacro %}
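A short usage sketch of the three-argument `post_messages` overload defined above, assuming the UDFs are deployed to a `claude` schema as the macro's default suggests; the prompt and model value are illustrative.

```sql
-- Call the 3-argument overload with an explicit model and token budget.
SELECT claude.post_messages(
    'claude-3-opus-20240229',
    [{'role': 'user', 'content': 'Explain VARIANT columns in one paragraph.'}],
    512
) AS response;
```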
30
macros/marketplace/claude/models_udfs.yaml.sql
Normal file
30
macros/marketplace/claude/models_udfs.yaml.sql
Normal file
@ -0,0 +1,30 @@
{% macro config_claude_models_udfs(schema_name = "claude", utils_schema_name = "claude_utils") -%}
{#
  This macro is used to generate API calls to Claude API endpoints
#}

{# Claude API Models #}
- name: {{ schema_name -}}.list_models
  signature: []
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$List available Claude models [API docs: List Models](https://docs.anthropic.com/claude/reference/models_get)$$
  sql: |
    SELECT claude_utils.get_api(
      '/v1/models'
    ) as response

- name: {{ schema_name -}}.get_model
  signature:
    - [MODEL, STRING, The model name to get details for (e.g. 'claude-3-opus-20240229')]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Get details for a specific Claude model [API docs: Get Model](https://docs.anthropic.com/claude/reference/models_retrieve)$$
  sql: |
    SELECT claude_utils.get_api(
      CONCAT('/v1/models/', MODEL)
    ) as response

{% endmacro %}
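For reference, a usage sketch of the two model UDFs defined above, again assuming the default `claude` schema; the model name is the example value from the signature description.

```sql
-- List the models Claude exposes, then fetch details for one of them.
SELECT claude.list_models() AS models;
SELECT claude.get_model('claude-3-opus-20240229') AS model_details;
```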
137
macros/marketplace/claude/util_udfs.yaml.sql
Normal file
137
macros/marketplace/claude/util_udfs.yaml.sql
Normal file
@ -0,0 +1,137 @@
{% macro config_claude_utils_udfs(schema_name = "claude_utils", utils_schema_name = "claude_utils") -%}
{#
  This macro is used to generate API calls to Claude API endpoints
#}
- name: {{ schema_name -}}.post_api
  signature:
    - [PATH, STRING, The API endpoint path]
    - [BODY, OBJECT, The request body]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Make calls to Claude API [API docs: Claude](https://docs.anthropic.com/claude/reference/getting-started-with-the-api)$$
  sql: |
    SELECT
    {% set v2_exists = is_udf_api_v2_compatible() %}
    {% if v2_exists -%}
    live.udf_api_v2(
      'POST',
      CONCAT('https://api.anthropic.com', PATH),
      {
        'anthropic-version': '2023-06-01',
        'x-api-key': '{API_KEY}',
        'content-type': 'application/json'
      },
      BODY,
      IFF(_utils.udf_whoami() <> CURRENT_USER(),
          '_FSC_SYS/CLAUDE',
          'Vault/prod/data_platform/claude'
      ),
      TRUE
    )
    {%- else -%}
    live.udf_api(
      'POST',
      CONCAT('https://api.anthropic.com', PATH),
      {
        'anthropic-version': '2023-06-01',
        'x-api-key': '{API_KEY}',
        'content-type': 'application/json'
      },
      BODY,
      IFF(_utils.udf_whoami() <> CURRENT_USER(),
          '_FSC_SYS/CLAUDE',
          'Vault/prod/data_platform/claude'
      )
    )
    {%- endif %}
    as response

- name: {{ schema_name -}}.get_api
  signature:
    - [PATH, STRING, The API endpoint path]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Make GET requests to Claude API [API docs: Get](https://docs.anthropic.com/claude/reference/get)$$
  sql: |
    SELECT
    {% set v2_exists = is_udf_api_v2_compatible() %}
    {% if v2_exists -%}
    live.udf_api_v2(
      'GET',
      CONCAT('https://api.anthropic.com', PATH),
      {
        'anthropic-version': '2023-06-01',
        'x-api-key': '{API_KEY}',
        'content-type': 'application/json'
      },
      NULL,
      IFF(_utils.udf_whoami() <> CURRENT_USER(),
          '_FSC_SYS/CLAUDE',
          'Vault/prod/data_platform/claude'
      ),
      TRUE
    )
    {%- else -%}
    live.udf_api(
      'GET',
      CONCAT('https://api.anthropic.com', PATH),
      {
        'anthropic-version': '2023-06-01',
        'x-api-key': '{API_KEY}',
        'content-type': 'application/json'
      },
      NULL,
      IFF(_utils.udf_whoami() <> CURRENT_USER(),
          '_FSC_SYS/CLAUDE',
          'Vault/prod/data_platform/claude'
      )
    )
    {%- endif %}
    as response

- name: {{ schema_name -}}.delete_method
  signature:
    - [PATH, STRING, The API endpoint path]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Make DELETE requests to Claude API [API docs: Delete](https://docs.anthropic.com/claude/reference/delete)$$
  sql: |
    SELECT
    {% set v2_exists = is_udf_api_v2_compatible() %}
    {% if v2_exists -%}
    live.udf_api_v2(
      'DELETE',
      CONCAT('https://api.anthropic.com', PATH),
      {
        'anthropic-version': '2023-06-01',
        'x-api-key': '{API_KEY}',
        'content-type': 'application/json'
      },
      NULL,
      IFF(_utils.udf_whoami() <> CURRENT_USER(),
          '_FSC_SYS/CLAUDE',
          'Vault/prod/data_platform/claude'
      ),
      TRUE
    )
    {%- else -%}
    live.udf_api(
      'DELETE',
      CONCAT('https://api.anthropic.com', PATH),
      {
        'anthropic-version': '2023-06-01',
        'x-api-key': '{API_KEY}',
        'content-type': 'application/json'
      },
      NULL,
      IFF(_utils.udf_whoami() <> CURRENT_USER(),
          '_FSC_SYS/CLAUDE',
          'Vault/prod/data_platform/claude'
      )
    )
    {%- endif %}
    as response
{% endmacro %}
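The `IFF(...)` expression repeated in each UDF above picks the secret path based on who is executing: a sketch of that selection in isolation, assuming `_utils.udf_whoami()` returns the effective UDF user as its name and usage here imply.

```sql
-- Which secret path the IFF(...) above selects.
SELECT IFF(
    _utils.udf_whoami() <> CURRENT_USER(),
    '_FSC_SYS/CLAUDE',                    -- system/service execution
    'Vault/prod/data_platform/claude'     -- interactive user execution
) AS secret_path;
```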
36
macros/marketplace/cmc/README.md
Normal file
36
macros/marketplace/cmc/README.md
Normal file
@ -0,0 +1,36 @@
# CoinMarketCap API Integration

CoinMarketCap is a leading cryptocurrency market data platform providing real-time and historical cryptocurrency prices, market capitalizations, and trading volumes.

## Setup

1. Get your CoinMarketCap API key from [CoinMarketCap Pro API](https://pro.coinmarketcap.com/account)

2. Store the API key in Snowflake secrets under `_FSC_SYS/CMC`

3. Deploy the CoinMarketCap marketplace functions:
```bash
dbt run --models cmc__ cmc_utils__cmc_utils
```

## Functions

### `cmc.get(path, query_args)`
Make GET requests to CoinMarketCap API endpoints.

## Examples

```sql
-- Get latest cryptocurrency listings
SELECT cmc.get('/v1/cryptocurrency/listings/latest', {'limit': 100});

-- Get specific cryptocurrency quotes
SELECT cmc.get('/v2/cryptocurrency/quotes/latest', {'symbol': 'BTC,ETH,ADA'});

-- Get cryptocurrency metadata
SELECT cmc.get('/v2/cryptocurrency/info', {'symbol': 'BTC'});
```

## API Documentation

- [CoinMarketCap API Documentation](https://coinmarketcap.com/api/documentation/v1/)
@ -17,7 +17,7 @@
      'GET',
      concat('https://pro-api.coinmarketcap.com', PATH, '?', utils.udf_object_to_url_query_string(QUERY_ARGS)),
      {'X-CMC_PRO_API_KEY': '{API_KEY}'},
      {},
      NULL,
      '_FSC_SYS/CMC'
    ) as response

@ -39,4 +39,4 @@
      '_FSC_SYS/CMC'
    ) as response

{% endmacro %}
{% endmacro %}
76
macros/marketplace/coingecko/README.md
Normal file
76
macros/marketplace/coingecko/README.md
Normal file
@ -0,0 +1,76 @@
# CoinGecko API Integration

Comprehensive cryptocurrency market data integration using CoinGecko's Pro API for prices, market data, and trading information.

## Setup

1. Get your CoinGecko Pro API key from [CoinGecko Pro](https://pro.coingecko.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/COINGECKO`

3. Deploy the CoinGecko marketplace functions:
```bash
dbt run --models coingecko__ coingecko_utils__coingecko_utils
```

## Functions

### `coingecko.get(path, query_args)`
Make GET requests to CoinGecko Pro API endpoints.

### `coingecko.post(path, body)`
Make POST requests to CoinGecko Pro API endpoints.

## Examples

### Price Data
```sql
-- Get current price for Bitcoin
SELECT coingecko.get('/api/v3/simple/price', {
    'ids': 'bitcoin',
    'vs_currencies': 'usd,eth',
    'include_24hr_change': 'true'
});

-- Get historical prices
SELECT coingecko.get('/api/v3/coins/bitcoin/history', {
    'date': '30-12-2023'
});
```

### Market Data
```sql
-- Get top cryptocurrencies by market cap
SELECT coingecko.get('/api/v3/coins/markets', {
    'vs_currency': 'usd',
    'order': 'market_cap_desc',
    'per_page': 100,
    'page': 1
});

-- Get global cryptocurrency statistics
SELECT coingecko.get('/api/v3/global', {});
```

### Token Information
```sql
-- Get detailed coin information
SELECT coingecko.get('/api/v3/coins/ethereum', {
    'localization': 'false',
    'tickers': 'false',
    'market_data': 'true',
    'community_data': 'true'
});
```

## Rate Limiting

CoinGecko Pro API limits:
- **Basic**: 10,000 calls/month
- **Premium**: 50,000 calls/month
- **Enterprise**: Custom limits

## API Documentation

- [CoinGecko Pro API Documentation](https://apiguide.coingecko.com/getting-started/introduction)
- [API Endpoints Reference](https://docs.coingecko.com/reference/introduction)
36
macros/marketplace/covalent/README.md
Normal file
36
macros/marketplace/covalent/README.md
Normal file
@ -0,0 +1,36 @@
# Covalent API Integration

Covalent provides a unified API to access rich blockchain data across multiple networks, offering historical and real-time data for wallets, transactions, and DeFi protocols.

## Setup

1. Get your Covalent API key from [Covalent Dashboard](https://www.covalenthq.com/platform/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/COVALENT`

3. Deploy the Covalent marketplace functions:
```bash
dbt run --models covalent__ covalent_utils__covalent_utils
```

## Functions

### `covalent.get(path, query_args)`
Make GET requests to Covalent API endpoints.

## Examples

```sql
-- Get token balances for an address
SELECT covalent.get('/v1/1/address/0x.../balances_v2/', {});

-- Get transaction history for an address
SELECT covalent.get('/v1/1/address/0x.../transactions_v2/', {'page-size': 100});

-- Get NFTs owned by an address
SELECT covalent.get('/v1/1/address/0x.../balances_nft/', {});
```

## API Documentation

- [Covalent API Documentation](https://www.covalenthq.com/docs/api/)
39
macros/marketplace/credmark/README.md
Normal file
39
macros/marketplace/credmark/README.md
Normal file
@ -0,0 +1,39 @@
# Credmark API Integration

Credmark provides DeFi risk modeling and analytics APIs with comprehensive data on lending protocols, token prices, and risk metrics.

## Setup

1. Get your Credmark API key from [Credmark Portal](https://gateway.credmark.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/CREDMARK`

3. Deploy the Credmark marketplace functions:
```bash
dbt run --models credmark__ credmark_utils__credmark_utils
```

## Functions

### `credmark.get(path, query_args)`
Make GET requests to Credmark API endpoints.

### `credmark.post(path, body)`
Make POST requests to Credmark API endpoints.

## Examples

```sql
-- Get token price
SELECT credmark.get('/v1/model/token.price', {'token_address': '0x...', 'block_number': 'latest'});

-- Get portfolio risk metrics
SELECT credmark.post('/v1/model/finance.var-portfolio', {'addresses': ['0x...'], 'window': 30});

-- Get lending pool information
SELECT credmark.get('/v1/model/compound-v2.pool-info', {'token_address': '0x...'});
```

## API Documentation

- [Credmark API Documentation](https://docs.credmark.com/)
39
macros/marketplace/dapplooker/README.md
Normal file
39
macros/marketplace/dapplooker/README.md
Normal file
@ -0,0 +1,39 @@
# DappLooker API Integration

DappLooker provides a blockchain analytics and data visualization platform with APIs for accessing DeFi, NFT, and on-chain metrics across multiple networks.

## Setup

1. Get your DappLooker API key from [DappLooker Dashboard](https://dapplooker.com/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/DAPPLOOKER`

3. Deploy the DappLooker marketplace functions:
```bash
dbt run --models dapplooker__ dapplooker_utils__dapplooker_utils
```

## Functions

### `dapplooker.get(path, query_args)`
Make GET requests to DappLooker API endpoints.

### `dapplooker.post(path, body)`
Make POST requests to DappLooker API endpoints.

## Examples

```sql
-- Get DeFi protocol metrics
SELECT dapplooker.get('/api/v1/defi/protocols', {'network': 'ethereum'});

-- Get NFT collection statistics
SELECT dapplooker.get('/api/v1/nft/collections/stats', {'collection': '0x...'});

-- Get wallet analytics
SELECT dapplooker.get('/api/v1/wallet/analytics', {'address': '0x...', 'network': 'ethereum'});
```

## API Documentation

- [DappLooker API Documentation](https://docs.dapplooker.com/)
36
macros/marketplace/dappradar/README.md
Normal file
36
macros/marketplace/dappradar/README.md
Normal file
@ -0,0 +1,36 @@
# DappRadar API Integration

DappRadar is a leading DApp analytics platform providing comprehensive data on decentralized applications, DeFi protocols, NFT collections, and blockchain games.

## Setup

1. Get your DappRadar API key from [DappRadar API Dashboard](https://dappradar.com/api)

2. Store the API key in Snowflake secrets under `_FSC_SYS/DAPPRADAR`

3. Deploy the DappRadar marketplace functions:
```bash
dbt run --models dappradar__ dappradar_utils__dappradar_utils
```

## Functions

### `dappradar.get(path, query_args)`
Make GET requests to DappRadar API endpoints.

## Examples

```sql
-- Get top DApps by category
SELECT dappradar.get('/dapps', {'chain': 'ethereum', 'category': 'defi', 'limit': 50});

-- Get DApp details
SELECT dappradar.get('/dapps/1', {});

-- Get NFT collection rankings
SELECT dappradar.get('/nft/collections', {'chain': 'ethereum', 'range': '24h', 'limit': 100});
```

## API Documentation

- [DappRadar API Documentation](https://docs.dappradar.com/)
@ -15,8 +15,8 @@
    SELECT
      live.udf_api(
        'GET',
        concat('https://api.dappradar.com', PATH, '?', utils.udf_object_to_url_query_string(QUERY_ARGS)),
        {'X-BLOBR-KEY': '{API_KEY}'},
        concat('https://apis.dappradar.com', PATH, '?', utils.udf_object_to_url_query_string(QUERY_ARGS)),
        {'X-API-KEY': '{API_KEY}'},
        {},
        '_FSC_SYS/DAPPRADAR'
      ) as response
@ -33,10 +33,10 @@
    SELECT
      live.udf_api(
        'POST',
        CONCAT('https://api.dappradar.com', PATH),
        {'X-BLOBR-KEY': '{API_KEY}'},
        CONCAT('https://apis.dappradar.com', PATH),
        {'X-API-KEY': '{API_KEY}'},
        BODY,
        '_FSC_SYS/DAPPRADAR'
      ) as response

{% endmacro %}
{% endmacro %}
39
macros/marketplace/deepnftvalue/README.md
Normal file
39
macros/marketplace/deepnftvalue/README.md
Normal file
@ -0,0 +1,39 @@
# DeepNFTValue API Integration

DeepNFTValue provides AI-powered NFT valuation and analytics services, offering price predictions and market insights for NFT collections.

## Setup

1. Get your DeepNFTValue API key from [DeepNFTValue Dashboard](https://deepnftvalue.com/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/DEEPNFTVALUE`

3. Deploy the DeepNFTValue marketplace functions:
```bash
dbt run --models deepnftvalue__ deepnftvalue_utils__deepnftvalue_utils
```

## Functions

### `deepnftvalue.get(path, query_args)`
Make GET requests to DeepNFTValue API endpoints.

### `deepnftvalue.post(path, body)`
Make POST requests to DeepNFTValue API endpoints.

## Examples

```sql
-- Get NFT valuation
SELECT deepnftvalue.get('/api/v1/valuation', {'contract_address': '0x...', 'token_id': '1234'});

-- Get collection analytics
SELECT deepnftvalue.get('/api/v1/collection/analytics', {'contract_address': '0x...'});

-- Get price predictions
SELECT deepnftvalue.post('/api/v1/predict', {'contract_address': '0x...', 'token_ids': [1, 2, 3]});
```

## API Documentation

- [DeepNFTValue API Documentation](https://docs.deepnftvalue.com/)
90
macros/marketplace/defillama/README.md
Normal file
90
macros/marketplace/defillama/README.md
Normal file
@ -0,0 +1,90 @@
# DefiLlama API Integration

DeFi analytics and TVL (Total Value Locked) data integration using DefiLlama's comprehensive DeFi protocol database.

## Setup

1. Most DefiLlama endpoints are free and don't require an API key

2. For premium endpoints, get your API key from [DefiLlama](https://defillama.com/docs/api)

3. Store the API key in Snowflake secrets under `_FSC_SYS/DEFILLAMA` (if using premium features)

4. Deploy the DefiLlama marketplace functions:
```bash
dbt run --models defillama__ defillama_utils__defillama_utils
```

## Functions

### `defillama.get(path, query_args)`
Make GET requests to DefiLlama API endpoints.

## Examples

### Protocol TVL Data
```sql
-- Get current TVL for all protocols
SELECT defillama.get('/protocols', {});

-- Get specific protocol information
SELECT defillama.get('/protocol/uniswap', {});

-- Get historical TVL for a protocol
SELECT defillama.get('/protocol/aave', {});
```

### Chain TVL Data
```sql
-- Get TVL for all chains
SELECT defillama.get('/chains', {});

-- Get historical TVL for Ethereum
SELECT defillama.get('/historicalChainTvl/Ethereum', {});
```

### Yield Farming Data
```sql
-- Get current yields
SELECT defillama.get('/yields', {});

-- Get yields for specific protocol
SELECT defillama.get('/yields/project/aave', {});
```

### Token Pricing
```sql
-- Get current token prices
SELECT defillama.get('/prices/current/ethereum:0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD', {});

-- Get historical token prices
SELECT defillama.get('/prices/historical/1640995200/ethereum:0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD', {});
```

### Stablecoin Data
```sql
-- Get stablecoin market caps
SELECT defillama.get('/stablecoins', {});

-- Get specific stablecoin information
SELECT defillama.get('/stablecoin/1', {}); -- USDT
```

### Bridge Data
```sql
-- Get bridge volumes
SELECT defillama.get('/bridges', {});

-- Get specific bridge information
SELECT defillama.get('/bridge/1', {});
```

## Rate Limiting

DefiLlama API is generally rate-limited to prevent abuse. Most endpoints are free to use.

## API Documentation

- [DefiLlama API Documentation](https://defillama.com/docs/api)
- [TVL API](https://defillama.com/docs/api#operations-tag-TVL)
- [Yields API](https://defillama.com/docs/api#operations-tag-Yields)
@ -17,8 +17,8 @@
      'GET',
      concat('https://api.llama.fi', PATH, '?', utils.udf_object_to_url_query_string(QUERY_ARGS)),
      {'Accept': '*/*', 'User-Agent': 'livequery/1.0 (Snowflake)', 'Host':'api.llama.fi', 'Connection': 'keep-alive'},
      {},
      NULL,
      IFF(ARRAY_CONTAINS('api_key'::VARIANT, OBJECT_KEYS(QUERY_ARGS)), '_FSC_SYS/DEFILLAMA', '')
    ) as response

{% endmacro %}
{% endmacro %}
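The `IFF(ARRAY_CONTAINS(...))` expression above only attaches the DefiLlama secret when the caller passes an `api_key` query argument, matching the free-vs-premium split described in the README. A sketch of that selection in isolation, with an illustrative query object:

```sql
-- Sketch of the conditional secret selection above.
SELECT IFF(
    ARRAY_CONTAINS('api_key'::VARIANT, OBJECT_KEYS({'api_key': 'x'})),
    '_FSC_SYS/DEFILLAMA',   -- premium call: resolve the stored key
    ''                      -- free endpoint: no secret needed
) AS secret_path;           -- => '_FSC_SYS/DEFILLAMA'
```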
74
macros/marketplace/dune/README.md
Normal file
74
macros/marketplace/dune/README.md
Normal file
@ -0,0 +1,74 @@
# Dune Analytics API Integration

Access Dune Analytics queries and results directly from Snowflake for blockchain data analysis and visualization.

## Setup

1. Get your Dune API key from [Dune Analytics](https://dune.com/settings/api)

2. Store the API key in Snowflake secrets under `_FSC_SYS/DUNE`

3. Deploy the Dune marketplace functions:
```bash
dbt run --models dune__ dune_utils__dune_utils
```

## Functions

### `dune.get(path, query_args)`
Make GET requests to Dune API endpoints.

### `dune.post(path, body)`
Make POST requests to Dune API endpoints.

## Examples

### Execute Queries
```sql
-- Execute a Dune query
SELECT dune.post('/api/v1/query/1234567/execute', {
    'query_parameters': {
        'token_address': '0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD'
    }
});
```

### Get Query Results
```sql
-- Get results from executed query
SELECT dune.get('/api/v1/execution/01234567-89ab-cdef-0123-456789abcdef/results', {});

-- Get latest results for a query
SELECT dune.get('/api/v1/query/1234567/results', {});
```

### Query Status
```sql
-- Check execution status
SELECT dune.get('/api/v1/execution/01234567-89ab-cdef-0123-456789abcdef/status', {});
```

### Parameterized Queries
```sql
-- Execute query with parameters
SELECT dune.post('/api/v1/query/1234567/execute', {
    'query_parameters': {
        'start_date': '2023-01-01',
        'end_date': '2023-12-31',
        'min_amount': 1000
    }
});
```

## Rate Limiting

Dune API rate limits vary by plan:
- **Free**: 20 executions per day
- **Plus**: 1,000 executions per day
- **Premium**: 10,000 executions per day

## API Documentation

- [Dune API Documentation](https://dune.com/docs/api/)
- [Authentication](https://dune.com/docs/api/api-reference/authentication/)
- [Query Execution](https://dune.com/docs/api/api-reference/execute-queries/)
36
macros/marketplace/espn/README.md
Normal file
36
macros/marketplace/espn/README.md
Normal file
@ -0,0 +1,36 @@
# ESPN API Integration

ESPN provides comprehensive sports data including scores, schedules, player statistics, and news across multiple sports leagues.

## Setup

1. Get your ESPN API key from [ESPN Developer Portal](https://developer.espn.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/ESPN`

3. Deploy the ESPN marketplace functions:
```bash
dbt run --models espn__ espn_utils__espn_utils
```

## Functions

### `espn.get(path, query_args)`
Make GET requests to ESPN API endpoints.

## Examples

```sql
-- Get NFL scores
SELECT espn.get('/v1/sports/football/nfl/scoreboard', {});

-- Get NBA team roster
SELECT espn.get('/v1/sports/basketball/nba/teams/1/roster', {});

-- Get MLB standings
SELECT espn.get('/v1/sports/baseball/mlb/standings', {});
```

## API Documentation

- [ESPN API Documentation](https://site.api.espn.com/apis/site/v2/sports/)
39
macros/marketplace/footprint/README.md
Normal file
39
macros/marketplace/footprint/README.md
Normal file
@ -0,0 +1,39 @@
# Footprint Analytics API Integration

Footprint Analytics provides comprehensive blockchain data analytics with APIs for accessing DeFi, NFT, GameFi, and cross-chain data insights.

## Setup

1. Get your Footprint API key from [Footprint Analytics Dashboard](https://www.footprint.network/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/FOOTPRINT`

3. Deploy the Footprint marketplace functions:
```bash
dbt run --models footprint__ footprint_utils__footprint_utils
```

## Functions

### `footprint.get(path, query_args)`
Make GET requests to Footprint Analytics API endpoints.

### `footprint.post(path, body)`
Make POST requests to Footprint Analytics API endpoints.

## Examples

```sql
-- Get DeFi protocol TVL data
SELECT footprint.get('/api/v1/defi/protocol/tvl', {'protocol': 'uniswap', 'chain': 'ethereum'});

-- Get NFT market trends
SELECT footprint.get('/api/v1/nft/market/overview', {'timeframe': '7d'});

-- Get GameFi protocol statistics
SELECT footprint.get('/api/v1/gamefi/protocols', {'chain': 'polygon', 'limit': 20});
```

## API Documentation

- [Footprint Analytics API Documentation](https://docs.footprint.network/)
36
macros/marketplace/fred/README.md
Normal file
36
macros/marketplace/fred/README.md
Normal file
@ -0,0 +1,36 @@
# FRED API Integration

FRED (Federal Reserve Economic Data) provides access to economic data from the Federal Reserve Bank of St. Louis, including GDP, inflation, employment, and financial market data.

## Setup

1. Get your FRED API key from [FRED API Registration](https://fred.stlouisfed.org/docs/api/api_key.html)

2. Store the API key in Snowflake secrets under `_FSC_SYS/FRED`

3. Deploy the FRED marketplace functions:
```bash
dbt run --models fred__ fred_utils__fred_utils
```

## Functions

### `fred.get(path, query_args)`
Make GET requests to FRED API endpoints.

## Examples

```sql
-- Get GDP data
SELECT fred.get('/series/observations', {'series_id': 'GDP', 'api_key': 'your_key'});

-- Get unemployment rate
SELECT fred.get('/series/observations', {'series_id': 'UNRATE', 'api_key': 'your_key'});

-- Get inflation rate (CPI)
SELECT fred.get('/series/observations', {'series_id': 'CPIAUCSL', 'api_key': 'your_key'});
```

## API Documentation

- [FRED API Documentation](https://fred.stlouisfed.org/docs/api/fred/)
668
macros/marketplace/github/README.md
Normal file
668
macros/marketplace/github/README.md
Normal file
@ -0,0 +1,668 @@
|
||||
# GitHub Actions Integration for Livequery
|
||||
|
||||
A comprehensive GitHub Actions integration that provides both scalar functions (UDFs) and table functions (UDTFs) for interacting with GitHub's REST API. Monitor workflows, retrieve logs, trigger dispatches, and analyze CI/CD data directly from your data warehouse.
|
||||
|
||||
## Prerequisites & Setup
|
||||
|
||||
### Authentication Setup
|
||||
|
||||
The integration uses GitHub Personal Access Tokens (PAT) or GitHub App tokens for authentication.
|
||||
|
||||
#### Option 1: Personal Access Token (Recommended for Development)
|
||||
|
||||
1. Go to [GitHub Settings → Developer settings → Personal access tokens](https://github.com/settings/tokens)
|
||||
2. Click "Generate new token (classic)"
|
||||
3. Select required scopes:
|
||||
- `repo` - Full control of private repositories
|
||||
- `actions:read` - Read access to Actions (minimum required)
|
||||
- `actions:write` - Write access to Actions (for triggering workflows)
|
||||
- `workflow` - Update GitHub Action workflows (for enable/disable)
|
||||
4. Copy the generated token
|
||||
5. Store securely in your secrets management system
|
||||
|
||||
#### Option 2: GitHub App (Recommended for Production)
|
||||
|
||||
1. Create a GitHub App in your organization settings
|
||||
2. Grant required permissions:
|
||||
- **Actions**: Read & Write
|
||||
- **Contents**: Read
|
||||
- **Metadata**: Read
|
||||
3. Install the app on repositories you want to access
|
||||
4. Use the app's installation token
|
||||
|
||||
### Environment Setup
|
||||
|
||||
The integration automatically handles authentication through Livequery's secrets management:
|
||||
|
||||
- **System users**: Uses `_FSC_SYS/GITHUB` secret path
|
||||
- **Regular users**: Uses `vault/github/api` secret path
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. List Repository Workflows
|
||||
|
||||
```sql
|
||||
-- Get all workflows for a repository
|
||||
SELECT * FROM TABLE(
|
||||
github_actions.tf_workflows('your-org', 'your-repo')
|
||||
);
|
||||
|
||||
-- Or as JSON object
|
||||
SELECT github_actions.workflows('your-org', 'your-repo') as workflows_data;
|
||||
```
|
||||
|
||||
### 2. Monitor Workflow Runs
|
||||
|
||||
```sql
|
||||
-- Get recent workflow runs with status filtering
|
||||
SELECT * FROM TABLE(
|
||||
github_actions.tf_runs('your-org', 'your-repo', {'status': 'completed', 'per_page': 10})
|
||||
);
|
||||
|
||||
-- Get runs for a specific workflow
|
||||
SELECT * FROM TABLE(
|
||||
github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml')
|
||||
);
|
||||
```
|
||||
|
||||
### 3. Analyze Failed Jobs
|
||||
|
||||
```sql
|
||||
-- Get failed jobs with complete logs for troubleshooting
|
||||
SELECT
|
||||
job_name,
|
||||
job_conclusion,
|
||||
job_url,
|
||||
logs
|
||||
FROM TABLE(
|
||||
github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', '12345678')
|
||||
);
|
||||
```
|
||||
|
||||
### 4. Trigger Workflow Dispatch
|
||||
|
||||
```sql
|
||||
-- Trigger a workflow manually
|
||||
SELECT github_actions.workflow_dispatches(
|
||||
'your-org',
|
||||
'your-repo',
|
||||
'deploy.yml',
|
||||
{
|
||||
'ref': 'main',
|
||||
'inputs': {
|
||||
'environment': 'staging',
|
||||
'debug': 'true'
|
||||
}
|
||||
}
|
||||
) as dispatch_result;
|
||||
```
|
||||
|
||||
## Function Reference
|
||||
|
||||
### Utility Functions (`github_utils` schema)
|
||||
|
||||
#### `github_utils.octocat()`
|
||||
Test GitHub API connectivity and authentication.
|
||||
```sql
|
||||
SELECT github_utils.octocat();
|
||||
-- Returns: GitHub API response with Octocat ASCII art
|
||||
```
|
||||
|
||||
#### `github_utils.headers()`
|
||||
Get properly formatted GitHub API headers.
|
||||
```sql
|
||||
SELECT github_utils.headers();
|
||||
-- Returns: '{"Authorization": "Bearer {TOKEN}", ...}'
|
||||
```
|
||||
|
||||
#### `github_utils.get_api(route, query)`
|
||||
Make GET requests to GitHub API.
|
||||
```sql
|
||||
SELECT github_utils.get_api('repos/your-org/your-repo', {'per_page': 10});
|
||||
```
|
||||
|
||||
#### `github_utils.post_api(route, data)`
|
||||
Make POST requests to GitHub API.
|
||||
```sql
|
||||
SELECT github_utils.post_api('repos/your-org/your-repo/issues', {
|
||||
'title': 'New Issue',
|
||||
'body': 'Issue description'
|
||||
});
|
||||
```
|
||||
|
||||
#### `github_utils.put_api(route, data)`
|
||||
Make PUT requests to GitHub API.
|
||||
```sql
|
||||
SELECT github_utils.put_api('repos/your-org/your-repo/actions/workflows/ci.yml/enable', {});
|
||||
```
|
||||
|
||||
### Workflow Functions (`github_actions` schema)

#### Scalar Functions (Return JSON Objects)

##### `github_actions.workflows(owner, repo[, query])`
List repository workflows.
```sql
-- Basic usage
SELECT github_actions.workflows('FlipsideCrypto', 'admin-models');

-- With query parameters
SELECT github_actions.workflows('FlipsideCrypto', 'admin-models', {'per_page': 50});
```

##### `github_actions.runs(owner, repo[, query])`
List workflow runs for a repository.
```sql
-- Get recent runs
SELECT github_actions.runs('your-org', 'your-repo');

-- Filter by status and branch
SELECT github_actions.runs('your-org', 'your-repo', {
    'status': 'completed',
    'branch': 'main',
    'per_page': 20
});
```

##### `github_actions.workflow_runs(owner, repo, workflow_id[, query])`
List runs for a specific workflow.
```sql
-- Get runs for the CI workflow
SELECT github_actions.workflow_runs('your-org', 'your-repo', 'ci.yml');

-- With filtering
SELECT github_actions.workflow_runs('your-org', 'your-repo', 'ci.yml', {
    'status': 'failure',
    'per_page': 10
});
```

##### `github_actions.workflow_dispatches(owner, repo, workflow_id[, body])`
Trigger a workflow dispatch event.
```sql
-- Simple dispatch (uses the main branch)
SELECT github_actions.workflow_dispatches('your-org', 'your-repo', 'deploy.yml');

-- With custom inputs
SELECT github_actions.workflow_dispatches('your-org', 'your-repo', 'deploy.yml', {
    'ref': 'develop',
    'inputs': {
        'environment': 'staging',
        'version': '1.2.3'
    }
});
```

##### `github_actions.workflow_enable(owner, repo, workflow_id)`
Enable a workflow.
```sql
SELECT github_actions.workflow_enable('your-org', 'your-repo', 'ci.yml');
```

##### `github_actions.workflow_disable(owner, repo, workflow_id)`
Disable a workflow.
```sql
SELECT github_actions.workflow_disable('your-org', 'your-repo', 'ci.yml');
```

##### `github_actions.workflow_run_logs(owner, repo, run_id)`
Get the download URL for workflow run logs.
```sql
SELECT github_actions.workflow_run_logs('your-org', 'your-repo', '12345678');
```

##### `github_actions.job_logs(owner, repo, job_id)`
Get the plain-text logs for a specific job.
```sql
SELECT github_actions.job_logs('your-org', 'your-repo', '87654321');
```

##### `github_actions.workflow_run_jobs(owner, repo, run_id[, query])`
List jobs for a workflow run.
```sql
-- Get all jobs
SELECT github_actions.workflow_run_jobs('your-org', 'your-repo', '12345678');

-- Filter to the latest attempt only
SELECT github_actions.workflow_run_jobs('your-org', 'your-repo', '12345678', {
    'filter': 'latest'
});
```

#### Table Functions (Return Structured Data)

##### `github_actions.tf_workflows(owner, repo[, query])`
List workflows as structured table data.
```sql
SELECT
    id,
    name,
    path,
    state,
    created_at,
    updated_at,
    badge_url,
    html_url
FROM TABLE(github_actions.tf_workflows('your-org', 'your-repo'));
```

##### `github_actions.tf_runs(owner, repo[, query])`
List workflow runs as structured table data.
```sql
SELECT
    id,
    name,
    status,
    conclusion,
    head_branch,
    head_sha,
    run_number,
    event,
    created_at,
    updated_at,
    html_url
FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 20}));
```

##### `github_actions.tf_workflow_runs(owner, repo, workflow_id[, query])`
List runs for a specific workflow as structured table data.
```sql
SELECT
    id,
    name,
    status,
    conclusion,
    run_number,
    head_branch,
    created_at,
    html_url
FROM TABLE(github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml'));
```

##### `github_actions.tf_workflow_run_jobs(owner, repo, run_id[, query])`
List jobs for a workflow run as structured table data.
```sql
SELECT
    id,
    name,
    status,
    conclusion,
    started_at,
    completed_at,
    runner_name,
    runner_group_name,
    html_url
FROM TABLE(github_actions.tf_workflow_run_jobs('your-org', 'your-repo', '12345678'));
```

##### `github_actions.tf_failed_jobs_with_logs(owner, repo, run_id)`
Get failed jobs with their complete logs for analysis.
```sql
SELECT
    job_id,
    job_name,
    job_status,
    job_conclusion,
    job_url,
    failed_steps,
    logs
FROM TABLE(github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', '12345678'));
```

## Advanced Usage Examples

### CI/CD Monitoring Dashboard

```sql
-- Recent workflow runs with failure rate
WITH recent_runs AS (
    SELECT
        name,
        status,
        conclusion,
        head_branch,
        created_at,
        html_url
    FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 100}))
    WHERE created_at >= CURRENT_DATE - 7
)
SELECT
    name,
    COUNT(*) as total_runs,
    COUNT(CASE WHEN conclusion = 'success' THEN 1 END) as successful_runs,
    COUNT(CASE WHEN conclusion = 'failure' THEN 1 END) as failed_runs,
    ROUND(COUNT(CASE WHEN conclusion = 'failure' THEN 1 END) * 100.0 / COUNT(*), 2) as failure_rate_pct
FROM recent_runs
GROUP BY name
ORDER BY failure_rate_pct DESC;
```

### Failed Job Analysis

#### Multi-Run Failure Analysis
```sql
-- Analyze failures across multiple runs
WITH failed_jobs AS (
    SELECT
        r.id as run_id,
        r.name as workflow_name,
        r.head_branch,
        r.created_at as run_created_at,
        j.job_name,
        j.job_conclusion,
        j.logs
    FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'status': 'completed'})) r
    CROSS JOIN TABLE(github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', r.id::TEXT)) j
    WHERE r.conclusion = 'failure'
      AND r.created_at >= CURRENT_DATE - 3
)
SELECT
    workflow_name,
    job_name,
    COUNT(*) as failure_count,
    ARRAY_AGG(DISTINCT head_branch) as affected_branches,
    ARRAY_SLICE(ARRAY_AGG(logs), 0, 3) as sample_logs
FROM failed_jobs
GROUP BY workflow_name, job_name
ORDER BY failure_count DESC;
```

#### Specific Job Log Analysis
```sql
-- Get detailed logs for a specific failed job
WITH specific_job AS (
    SELECT
        id as job_id,
        name as job_name,
        status,
        conclusion,
        started_at,
        completed_at,
        html_url,
        steps
    FROM TABLE(github_actions.tf_workflow_run_jobs('your-org', 'your-repo', '12345678'))
    WHERE name = 'Build and Test' -- Specify the job name you want to analyze
      AND conclusion = 'failure'
)
SELECT
    job_id,
    job_name,
    status,
    conclusion,
    started_at,
    completed_at,
    html_url,
    steps,
    github_actions.job_logs('your-org', 'your-repo', job_id::TEXT) as full_logs
FROM specific_job;
```

#### From Workflow ID to Failed Logs
```sql
-- Complete workflow: Workflow ID → Run ID → Failed Logs
WITH latest_failed_run AS (
    -- Step 1: Get the most recent failed run for your workflow
    SELECT
        id as run_id,
        name as workflow_name,
        status,
        conclusion,
        head_branch,
        head_sha,
        created_at,
        html_url as run_url
    FROM TABLE(github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml')) -- Your workflow ID here
    WHERE conclusion = 'failure'
    ORDER BY created_at DESC
    LIMIT 1
),
failed_jobs_with_logs AS (
    -- Step 2: Get all failed jobs and their logs for that run
    SELECT
        r.run_id,
        r.workflow_name,
        r.head_branch,
        r.head_sha,
        r.created_at,
        r.run_url,
        j.job_id,
        j.job_name,
        j.job_status,
        j.job_conclusion,
        j.job_url,
        j.failed_steps,
        j.logs
    FROM latest_failed_run r
    CROSS JOIN TABLE(github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', r.run_id::TEXT)) j
)
SELECT
    run_id,
    workflow_name,
    head_branch,
    created_at,
    run_url,
    job_name,
    job_url,
    -- Extract key error information from the logs
    CASE
        WHEN CONTAINS(logs, 'npm ERR!') THEN 'NPM Error'
        WHEN CONTAINS(logs, 'fatal:') THEN 'Git Error'
        WHEN CONTAINS(logs, 'Error: Process completed with exit code') THEN 'Process Exit Error'
        WHEN CONTAINS(logs, 'timeout') THEN 'Timeout Error'
        ELSE 'Other Error'
    END as error_type,
    -- Get the first error line from the logs
    REGEXP_SUBSTR(logs, '.*Error[^\\n]*', 1, 1) as first_error_line,
    -- Full logs for detailed analysis
    logs as full_logs
FROM failed_jobs_with_logs
ORDER BY job_name;
```

#### Quick Workflow ID to Run ID Lookup
```sql
-- Simple: Just get run IDs for a specific workflow
SELECT
    id as run_id,
    status,
    conclusion,
    head_branch,
    created_at,
    html_url
FROM TABLE(github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml')) -- Replace with your workflow ID
WHERE conclusion = 'failure'
ORDER BY created_at DESC
LIMIT 5;
```

#### Failed Steps Deep Dive
```sql
-- Analyze failed steps within jobs and extract error patterns
WITH job_details AS (
    SELECT
        id as job_id,
        name as job_name,
        conclusion,
        steps,
        github_actions.job_logs('your-org', 'your-repo', id::TEXT) as logs
    FROM TABLE(github_actions.tf_workflow_run_jobs('your-org', 'your-repo', '12345678'))
    WHERE conclusion = 'failure'
),
failed_steps AS (
    SELECT
        job_id,
        job_name,
        step.value:name::STRING as step_name,
        step.value:conclusion::STRING as step_conclusion,
        step.value:number::INTEGER as step_number,
        logs
    FROM job_details,
        LATERAL FLATTEN(input => steps) step
    WHERE step.value:conclusion::STRING = 'failure'
)
SELECT
    job_name,
    step_name,
    step_number,
    step_conclusion,
    -- Extract error context from the logs (about 1,000 chars around the first 'Error:')
    SUBSTR(logs, GREATEST(1, CHARINDEX('Error:', logs) - 50), 1000) as error_context,
    -- Extract common error patterns
    CASE
        WHEN CONTAINS(logs, 'npm ERR!') THEN 'NPM Error'
        WHEN CONTAINS(logs, 'fatal:') THEN 'Git Error'
        WHEN CONTAINS(logs, 'Error: Process completed with exit code') THEN 'Process Exit Error'
        WHEN CONTAINS(logs, 'timeout') THEN 'Timeout Error'
        WHEN CONTAINS(logs, 'permission denied') THEN 'Permission Error'
        ELSE 'Other Error'
    END as error_category
FROM failed_steps
ORDER BY job_name, step_number;
```

### Workflow Performance Metrics

```sql
-- Average workflow duration by branch
SELECT
    head_branch,
    AVG(DATEDIFF(second, run_started_at, updated_at)) as avg_duration_seconds,
    COUNT(*) as run_count,
    COUNT(CASE WHEN conclusion = 'success' THEN 1 END) as success_count
FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 200}))
WHERE run_started_at IS NOT NULL
  AND updated_at IS NOT NULL
  AND status = 'completed'
  AND created_at >= CURRENT_DATE - 30
GROUP BY head_branch
ORDER BY avg_duration_seconds DESC;
```

### Automated Workflow Management

```sql
-- Conditionally trigger deployment based on main branch success
WITH latest_main_run AS (
    SELECT
        id,
        conclusion,
        head_sha,
        created_at
    FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {
        'branch': 'main',
        'per_page': 1
    }))
    ORDER BY created_at DESC
    LIMIT 1
)
SELECT
    CASE
        WHEN conclusion = 'success' THEN
            github_actions.workflow_dispatches('your-org', 'your-repo', 'deploy.yml', {
                'ref': 'main',
                'inputs': {'sha': head_sha}
            })
        ELSE
            OBJECT_CONSTRUCT('skipped', true, 'reason', 'main branch tests failed')
    END as deployment_result
FROM latest_main_run;
```

## Error Handling

All functions return structured responses with error information:

```sql
-- Check for API errors
WITH api_response AS (
    SELECT github_actions.workflows('invalid-org', 'invalid-repo') as response
)
SELECT
    response:status_code as status_code,
    response:error as error_message,
    response:data as data
FROM api_response;
```

Common HTTP status codes:
- **200**: Success
- **401**: Unauthorized (check token permissions)
- **403**: Forbidden (check repository access)
- **404**: Not found (check org/repo/workflow names)
- **422**: Validation failed (check input parameters)

## Rate Limiting

The GitHub API enforces rate limits:
- **Personal tokens**: 5,000 requests per hour
- **GitHub App tokens**: 5,000 requests per hour per installation
- **Search API**: 30 requests per minute

The functions automatically handle rate limiting through Livequery's retry mechanisms.
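
To see where you stand against these limits, you can query GitHub's `rate_limit` endpoint through the generic GET helper; a minimal sketch, assuming the response body is exposed under `:data` as in the other examples:

```sql
-- Check the remaining core-API quota for the active token
WITH rl AS (
    SELECT github_utils.get_api('rate_limit', {}):data as rate_data
)
SELECT
    rate_data:rate:remaining::NUMBER as remaining_requests,
    TO_TIMESTAMP(rate_data:rate:reset::NUMBER) as quota_resets_at
FROM rl;
```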

## Security Best Practices

1. **Use minimal permissions**: Only grant necessary scopes to tokens
2. **Rotate tokens regularly**: Set expiration dates and rotate tokens on a schedule
3. **Use GitHub Apps for production**: More secure than personal access tokens
4. **Monitor usage**: Track API calls to avoid hitting rate limits
5. **Secure storage**: Use proper secrets management for tokens

## Troubleshooting

### Common Issues

**Authentication Errors (401)**
```sql
-- Test authentication
SELECT github_utils.octocat();
-- Should return status_code = 200 if the token is valid
```

**Permission Errors (403)**
- Ensure the token has the required scopes (`actions:read` at minimum)
- Check that the repository is accessible to the token owner
- For private repos, ensure the `repo` scope is granted

**Workflow Not Found (404)**
```sql
-- List the available workflows first
SELECT * FROM TABLE(github_actions.tf_workflows('your-org', 'your-repo'));
```

**Rate Limiting (403 with a rate-limit message)**
- Space out requests in your queries
- Use pagination parameters to reduce request frequency
- Monitor your rate limit status (see the `rate_limit` sketch above)

### Performance Tips

1. **Use table functions for analytics**: More efficient for large datasets
2. **Implement pagination**: Use the `per_page` parameter to control response size
3. **Cache results**: Store frequently accessed data in tables (see the sketch after this list)
4. **Filter at the API level**: Use query parameters instead of SQL `WHERE` clauses
5. **Batch operations**: Combine multiple API calls where possible
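
For tip 3, a minimal caching sketch: snapshot recent runs into a regular table so dashboards hit the warehouse instead of the GitHub API (the table name `github_runs_cache` is illustrative):

```sql
-- Snapshot recent runs into a local table to save API quota
CREATE OR REPLACE TABLE github_runs_cache AS
SELECT
    id,
    name,
    status,
    conclusion,
    head_branch,
    created_at,
    html_url,
    CURRENT_TIMESTAMP() as cached_at  -- when this snapshot was taken
FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 100}));

-- Dashboards then read the cache instead of calling the API
SELECT * FROM github_runs_cache WHERE conclusion = 'failure';
```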

## GitHub API Documentation

- [GitHub REST API](https://docs.github.com/en/rest) - Complete API reference
- [Actions API](https://docs.github.com/en/rest/actions) - Actions-specific endpoints
- [Authentication](https://docs.github.com/en/rest/overview/authenticating-to-the-rest-api) - Token setup and permissions
- [Rate Limiting](https://docs.github.com/en/rest/overview/rate-limits-for-the-rest-api) - API limits and best practices

## Function Summary

| Function | Type | Purpose |
|----------|------|---------|
| `github_utils.octocat()` | UDF | Test API connectivity |
| `github_utils.get_api/post_api/put_api()` | UDF | Generic API requests |
| `github_actions.workflows()` | UDF | List workflows (JSON) |
| `github_actions.runs()` | UDF | List runs (JSON) |
| `github_actions.workflow_runs()` | UDF | List workflow runs (JSON) |
| `github_actions.workflow_dispatches()` | UDF | Trigger workflows |
| `github_actions.workflow_enable/disable()` | UDF | Control workflow state |
| `github_actions.*_logs()` | UDF | Retrieve logs |
| `github_actions.tf_*()` | UDTF | Structured table data |
| `github_actions.tf_failed_jobs_with_logs()` | UDTF | Failed job analysis |

Ready to monitor and automate your GitHub Actions workflows directly from your data warehouse!
@@ -13,7 +13,7 @@
COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
sql: |
SELECT
{{ utils_schema_name }}.GET(
{{ utils_schema_name }}.get_api(
CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows'),
query
):data::OBJECT
@@ -40,7 +40,7 @@
COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
sql: |
SELECT
{{ utils_schema_name }}.GET(
{{ utils_schema_name }}.get_api(
CONCAT_WS('/', 'repos', owner, repo, 'actions/runs'),
query
):data::OBJECT
@@ -68,7 +68,7 @@
COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$
sql: |
SELECT
{{ utils_schema_name }}.GET(
{{ utils_schema_name }}.get_api(
CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'runs'),
query
):data::OBJECT
@@ -97,7 +97,7 @@
COMMENT = $$You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#create-a-workflow-dispatch-event).$$
sql: |
SELECT
{{ utils_schema_name }}.POST(
{{ utils_schema_name }}.post_api(
CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'dispatches'),
COALESCE(body, {'ref': 'main'})::OBJECT
)::OBJECT
@@ -126,7 +126,7 @@
COMMENT = $$Enables a workflow. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/reference/actions#enable-a-workflow).$$
sql: |
SELECT
{{ utils_schema_name }}.PUT(
{{ utils_schema_name }}.put_api(
CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'enable'),
{}
)::OBJECT
@@ -141,8 +141,70 @@
COMMENT = $$Disables a workflow. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/reference/actions#disable-a-workflow).$$
sql: |
SELECT
{{ utils_schema_name }}.PUT(
{{ utils_schema_name }}.put_api(
CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'disable'),
{}
)::OBJECT

- name: {{ schema_name -}}.workflow_run_logs
signature:
- [owner, "TEXT"]
- [repo, "TEXT"]
- [run_id, "TEXT"]
return_type:
- "TEXT"
options: |
COMMENT = $$Download workflow run logs as a ZIP archive. Gets a redirect URL to the actual log archive. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#download-workflow-run-logs).$$
sql: |
SELECT
{{ utils_schema_name }}.get_api(
CONCAT_WS('/', 'repos', owner, repo, 'actions/runs', run_id, 'logs'),
{}
):data::TEXT

- name: {{ schema_name -}}.job_logs
signature:
- [owner, "TEXT"]
- [repo, "TEXT"]
- [job_id, "TEXT"]
return_type:
- "TEXT"
options: |
COMMENT = $$Download job logs. Gets the plain text logs for a specific job. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#download-job-logs-for-a-workflow-run).$$
sql: |
SELECT
{{ utils_schema_name }}.get_api(
CONCAT_WS('/', 'repos', owner, repo, 'actions/jobs', job_id, 'logs'),
{}
):data::TEXT

- name: {{ schema_name -}}.workflow_run_jobs
signature:
- [owner, "TEXT"]
- [repo, "TEXT"]
- [run_id, "TEXT"]
- [query, "OBJECT"]
return_type:
- "OBJECT"
options: |
COMMENT = $$Lists jobs for a workflow run. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$
sql: |
SELECT
{{ utils_schema_name }}.get_api(
CONCAT_WS('/', 'repos', owner, repo, 'actions/runs', run_id, 'jobs'),
query
):data::OBJECT
- name: {{ schema_name -}}.workflow_run_jobs
signature:
- [owner, "TEXT"]
- [repo, "TEXT"]
- [run_id, "TEXT"]
return_type:
- "OBJECT"
options: |
COMMENT = $$Lists jobs for a workflow run. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$
sql: |
SELECT
{{ schema_name -}}.workflow_run_jobs(owner, repo, run_id, {})

{% endmacro %}
@@ -166,4 +166,181 @@
SELECT *
FROM TABLE({{ schema_name -}}.tf_workflow_runs(owner, repo, WORKFLOW_ID, {}))

{% endmacro %}
- name: {{ schema_name -}}.tf_workflow_run_jobs
signature:
- [owner, "TEXT"]
- [repo, "TEXT"]
- [run_id, "TEXT"]
- [query, "OBJECT"]
return_type:
- "TABLE(id NUMBER, run_id NUMBER, workflow_name STRING, head_branch STRING, run_url STRING, run_attempt NUMBER, node_id STRING, head_sha STRING, url STRING, html_url STRING, status STRING, conclusion STRING, created_at TIMESTAMP, started_at TIMESTAMP, completed_at TIMESTAMP, name STRING, check_run_url STRING, labels VARIANT, runner_id NUMBER, runner_name STRING, runner_group_id NUMBER, runner_group_name STRING, steps VARIANT)"
options: |
COMMENT = $$Lists jobs for a workflow run as a table. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$
sql: |
WITH response AS
(
SELECT
github_actions.workflow_run_jobs(OWNER, REPO, RUN_ID, QUERY) AS response
)
SELECT
value:id::NUMBER AS id
,value:run_id::NUMBER AS run_id
,value:workflow_name::STRING AS workflow_name
,value:head_branch::STRING AS head_branch
,value:run_url::STRING AS run_url
,value:run_attempt::NUMBER AS run_attempt
,value:node_id::STRING AS node_id
,value:head_sha::STRING AS head_sha
,value:url::STRING AS url
,value:html_url::STRING AS html_url
,value:status::STRING AS status
,value:conclusion::STRING AS conclusion
,value:created_at::TIMESTAMP AS created_at
,value:started_at::TIMESTAMP AS started_at
,value:completed_at::TIMESTAMP AS completed_at
,value:name::STRING AS name
,value:check_run_url::STRING AS check_run_url
,value:labels::VARIANT AS labels
,value:runner_id::NUMBER AS runner_id
,value:runner_name::STRING AS runner_name
,value:runner_group_id::NUMBER AS runner_group_id
,value:runner_group_name::STRING AS runner_group_name
,value:steps::VARIANT AS steps
FROM response, LATERAL FLATTEN( input=> response:jobs)

- name: {{ schema_name -}}.tf_workflow_run_jobs
signature:
- [owner, "TEXT"]
- [repo, "TEXT"]
- [run_id, "TEXT"]
return_type:
- "TABLE(id NUMBER, run_id NUMBER, workflow_name STRING, head_branch STRING, run_url STRING, run_attempt NUMBER, node_id STRING, head_sha STRING, url STRING, html_url STRING, status STRING, conclusion STRING, created_at TIMESTAMP, started_at TIMESTAMP, completed_at TIMESTAMP, name STRING, check_run_url STRING, labels VARIANT, runner_id NUMBER, runner_name STRING, runner_group_id NUMBER, runner_group_name STRING, steps VARIANT)"
options: |
COMMENT = $$Lists jobs for a workflow run as a table. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$
sql: |
SELECT *
FROM TABLE({{ schema_name -}}.tf_workflow_run_jobs(owner, repo, run_id, {}))

- name: {{ schema_name -}}.tf_failed_jobs_with_logs
signature:
- [owner, "TEXT"]
- [repo, "TEXT"]
- [run_id, "TEXT"]
return_type:
- "TABLE(run_id STRING, job_id NUMBER, job_name STRING, job_status STRING, job_conclusion STRING, job_url STRING, workflow_name STRING, failed_steps VARIANT, logs TEXT, failed_step_logs ARRAY)"
options: |
COMMENT = $$Gets failed jobs for a workflow run with their complete logs. Combines job info with log content for analysis.$$
sql: |
WITH failed_jobs AS (
SELECT
run_id::STRING AS run_id,
id AS job_id,
name AS job_name,
status AS job_status,
conclusion AS job_conclusion,
html_url AS job_url,
workflow_name,
steps AS failed_steps,
{{ schema_name -}}.job_logs(owner, repo, job_id::TEXT) AS logs
FROM TABLE({{ schema_name -}}.tf_workflow_run_jobs(owner, repo, run_id))
WHERE conclusion = 'failure'
),
error_sections AS (
SELECT
run_id,
job_id,
job_name,
job_status,
job_conclusion,
job_url,
workflow_name,
failed_steps,
logs,
ARRAY_AGG(section.value) AS failed_step_logs
FROM failed_jobs,
LATERAL FLATTEN(INPUT => SPLIT(logs, '##[group]')) section
WHERE CONTAINS(section.value, '##[error]')
GROUP BY run_id, job_id, job_name, job_status, job_conclusion, job_url, workflow_name, failed_steps, logs
)
SELECT
run_id,
job_id,
job_name,
job_status,
job_conclusion,
job_url,
workflow_name,
failed_steps,
logs,
COALESCE(failed_step_logs, ARRAY_CONSTRUCT()) AS failed_step_logs
FROM failed_jobs
LEFT JOIN error_sections USING (run_id, job_id)

- name: {{ schema_name -}}.tf_failure_analysis_with_ai
signature:
- [owner, "TEXT", "GitHub repository owner/organization name"]
- [repo, "TEXT", "GitHub repository name"]
- [run_id, "TEXT", "GitHub Actions run ID to analyze"]
- [ai_provider, "TEXT", "AI provider to use: 'cortex' (Snowflake built-in AI)"]
- [model_name, "STRING", "Model name (required): 'mistral-large', 'mistral-7b', 'llama2-70b-chat', 'mixtral-8x7b'"]
- [ai_prompt, "STRING", "Custom AI analysis prompt. Leave empty to use default failure analysis prompt."]
return_type:
- "TABLE(run_id STRING, ai_analysis STRING, total_failures NUMBER, failure_metadata ARRAY)"
options: |
COMMENT = $$Gets GitHub Actions failure analysis using Snowflake Cortex AI with custom prompts for Slack notifications.$$
sql: |
WITH failure_data AS (
SELECT
run_id,
COUNT(*) as total_failures,
ARRAY_AGG(OBJECT_CONSTRUCT(
'workflow_name', workflow_name,
'run_id', run_id,
'job_name', job_name,
'job_id', job_id,
'job_url', job_url,
'error_sections', ARRAY_SIZE(failed_step_logs),
'logs_preview', ARRAY_TO_STRING(failed_step_logs, '\n')
)) as failure_metadata,
LISTAGG(
CONCAT(
'Workflow: ', workflow_name, '\n',
'Job: ', job_name, '\n',
'Job ID: ', job_id, '\n',
'Run ID: ', run_id, '\n',
'Error: ', ARRAY_TO_STRING(failed_step_logs, '\n')
),
'\n\n---\n\n'
) WITHIN GROUP (ORDER BY job_name) as job_details
FROM TABLE({{ schema_name -}}.tf_failed_jobs_with_logs(owner, repo, run_id))
GROUP BY run_id
)
SELECT
run_id::STRING,
snowflake.cortex.complete(
model_name,
CONCAT(
COALESCE(
NULLIF(ai_prompt, ''),
'Analyze these GitHub Actions failures and provide:\n1. Common failure patterns\n2. Root cause analysis\n3. Prioritized action items\n\nKeep it concise with 1-2 sentences per section in markdown format.\n\n'
),
job_details
)
) as ai_analysis,
total_failures,
failure_metadata
FROM failure_data

- name: {{ schema_name -}}.tf_failure_analysis_with_ai
signature:
- [owner, "TEXT"]
- [repo, "TEXT"]
- [run_id, "TEXT"]
return_type:
- "TABLE(run_id STRING, ai_analysis STRING, total_failures NUMBER, failure_metadata ARRAY)"
options: |
COMMENT = $$Gets GitHub Actions failure analysis with default AI provider (cortex) for Slack notifications.$$
sql: |
SELECT * FROM TABLE({{ schema_name -}}.tf_failure_analysis_with_ai(owner, repo, run_id, 'cortex', 'mistral-large', ''))

{% endmacro %}

@@ -11,14 +11,26 @@
COMMENT = $$Verify token [Authenticating to the REST API](https://docs.github.com/en/rest/overview/authenticating-to-the-rest-api?apiVersion=2022-11-28).$$
sql: |
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'GET',
'https://api.github.com/octocat',
{'Authorization': 'Bearer {TOKEN}', 'X-GitHub-Api-Version': '2022-11-28'},
{},
IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api'),
TRUE
)
{%- else -%}
live.udf_api(
'GET',
'https://api.github.com/octocat',
{'Authorization': 'Bearer {TOKEN}',
'X-GitHub-Api-Version': '2022-11-28'},
{'Authorization': 'Bearer {TOKEN}', 'X-GitHub-Api-Version': '2022-11-28'},
{},
'_FSC_SYS/GITHUB'
) as response
IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api')
)
{%- endif %}
as response

- name: {{ schema_name -}}.headers
signature: []
@@ -31,9 +43,10 @@
sql: |
SELECT '{"Authorization": "Bearer {TOKEN}",
"X-GitHub-Api-Version": "2022-11-28",
"Accept": "application/vnd.github+json"}'
"Accept": "application/vnd.github+json"
}'

- name: {{ schema_name -}}.get
- name: {{ schema_name -}}.get_api
signature:
- [route, "TEXT"]
- [query, "OBJECT"]
@@ -43,14 +56,27 @@
COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$
sql: |
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'GET',
CONCAT_WS('/', 'https://api.github.com', route || '?') || utils.udf_urlencode(query),
PARSE_JSON({{ schema_name -}}.headers()),
{},
IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api'),
TRUE
)
{%- else -%}
live.udf_api(
'GET',
CONCAT_WS('/', 'https://api.github.com', route || '?') || utils.udf_urlencode(query),
PARSE_JSON({{ schema_name -}}.headers()),
{},
'_FSC_SYS/GITHUB'
)
- name: {{ schema_name -}}.post
IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api')
)
{%- endif %}
as response
- name: {{ schema_name -}}.post_api
signature:
- [route, "TEXT"]
- [data, "OBJECT"]
@@ -60,14 +86,27 @@
COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$
sql: |
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'POST',
CONCAT_WS('/', 'https://api.github.com', route),
PARSE_JSON({{ schema_name -}}.headers()),
data,
IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api'),
TRUE
)
{%- else -%}
live.udf_api(
'POST',
CONCAT_WS('/', 'https://api.github.com', route),
PARSE_JSON({{ schema_name -}}.headers()),
data,
'_FSC_SYS/GITHUB'
)
- name: {{ schema_name -}}.put
IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api')
)
{%- endif %}
as response
- name: {{ schema_name -}}.put_api
signature:
- [route, "TEXT"]
- [data, "OBJECT"]
@@ -77,11 +116,24 @@
COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$
sql: |
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'PUT',
CONCAT_WS('/', 'https://api.github.com', route),
PARSE_JSON({{ schema_name -}}.headers()),
data,
IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api'),
TRUE
)
{%- else -%}
live.udf_api(
'PUT',
CONCAT_WS('/', 'https://api.github.com', route),
PARSE_JSON({{ schema_name -}}.headers()),
data,
'_FSC_SYS/GITHUB'
)
{% endmacro %}
IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GITHUB', 'Vault/github/api')
)
{%- endif %}
as response
{% endmacro %}

macros/marketplace/helius/README.md (new file, 44 lines)
@@ -0,0 +1,44 @@
# Helius API Integration

Helius provides high-performance Solana RPC infrastructure and enhanced APIs for accessing Solana blockchain data, including DAS (Digital Asset Standard) APIs.

## Setup

1. Get your Helius API key from the [Helius Dashboard](https://dashboard.helius.dev/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/HELIUS`

3. Deploy the Helius marketplace functions:
```bash
dbt run --models helius__ helius_utils__helius_utils
```

## Functions

### `helius.get(path, query_args)`
Make GET requests to Helius API endpoints.

### `helius.post(path, body)`
Make POST requests to Helius API endpoints.

## Examples

```sql
-- Get Solana account info
SELECT helius.post('/rpc', {
    'jsonrpc': '2.0',
    'method': 'getAccountInfo',
    'params': ['account_address'],
    'id': 1
});

-- Get compressed NFTs by owner
SELECT helius.get('/v0/addresses/owner_address/nfts', {'compressed': true});

-- Get transaction history
SELECT helius.get('/v0/addresses/address/transactions', {'limit': 100});
```

## API Documentation

- [Helius API Documentation](https://docs.helius.dev/)
@@ -22,18 +22,37 @@
options: |
COMMENT = $$Returns the native Solana balance (in lamports) and all token balances for a given address. [Helius docs here](https://docs.helius.xyz/solana-apis/balances-api).$$
sql: |
SELECT live.udf_api(
'GET',
CASE
WHEN NETWORK = 'devnet' THEN
concat('https://api-devnet.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}')
ELSE
concat('https://api.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}')
END,
{},
{},
'_FSC_SYS/HELIUS'
) as response
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'GET',
CASE
WHEN NETWORK = 'devnet' THEN
concat('https://api-devnet.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}')
ELSE
concat('https://api.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}')
END,
{'fsc-quantum-execution-mode': 'async'},
{},
'_FSC_SYS/HELIUS',
TRUE
)
{%- else -%}
live.udf_api(
'GET',
CASE
WHEN NETWORK = 'devnet' THEN
concat('https://api-devnet.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}')
ELSE
concat('https://api.helius.xyz/v0/addresses/', ADDRESS, '/balances?api-key={API_KEY}')
END,
{},
{},
'_FSC_SYS/HELIUS'
)
{%- endif %}
as response

- name: {{ schema_name -}}.parse_transactions
signature:
@@ -44,6 +63,6 @@
options: |
COMMENT = $$Returns an array of enriched, human-readable transactions of the given transaction signatures. Up to 100 transactions per call. [Helius docs here](https://docs.helius.xyz/solana-apis/enhanced-transactions-api/parse-transaction-s).$$
sql: |
SELECT {{ utils_schema_name -}}.post(NETWORK, '/v0/transactions', {'transactions': TRANSACTIONS}) as response
SELECT {{ utils_schema_name -}}.post_api(NETWORK, '/v0/transactions', {'transactions': TRANSACTIONS}) as response

{% endmacro %}
{% endmacro %}

@@ -3,9 +3,9 @@ SELECT {{ schema_name -}}.rpc(NETWORK, '{{method}}', PARAMS) as response
{% endmacro %}

{% macro helius_get_call(schema_name, path) %}
SELECT {{ schema_name -}}.get(NETWORK, '{{path}}', QUERY_PARAMS) as response
SELECT {{ schema_name -}}.get_api(NETWORK, '{{path}}', QUERY_PARAMS) as response
{% endmacro %}

{% macro helius_post_call(schema_name, path) %}
SELECT {{ schema_name -}}.post(NETWORK, '{{path}}', BODY) as response
SELECT {{ schema_name -}}.post_api(NETWORK, '{{path}}', BODY) as response
{% endmacro %}
@@ -3,7 +3,7 @@
This macro is used to generate the Helius base endpoints
#}

- name: {{ schema -}}.get
- name: {{ schema_name }}.get_api
signature:
- [NETWORK, STRING, The network 'devnet' or 'mainnet']
- [PATH, STRING, The API path starting with '/']
@@ -13,20 +13,39 @@
options: |
COMMENT = $$Used to issue an HTTP GET request to Helius.$$
sql: |
SELECT live.udf_api(
'GET',
CASE
WHEN NETWORK = 'devnet' THEN
concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS))
ELSE
concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS))
END,
{},
{},
'_FSC_SYS/HELIUS'
) as response
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'GET',
CASE
WHEN NETWORK = 'devnet' THEN
concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS))
ELSE
concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS))
END,
{'fsc-quantum-execution-mode': 'async'},
{},
'_FSC_SYS/HELIUS',
TRUE
)
{%- else -%}
live.udf_api(
'GET',
CASE
WHEN NETWORK = 'devnet' THEN
concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS))
ELSE
concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}&', utils.udf_object_to_url_query_string(QUERY_PARAMS))
END,
{},
{},
'_FSC_SYS/HELIUS'
)
{%- endif %}
as response

- name: {{ schema -}}.post
- name: {{ schema_name }}.post_api
signature:
- [NETWORK, STRING, The network 'devnet' or 'mainnet']
- [PATH, STRING, The API path starting with '/']
@@ -36,20 +55,39 @@
options: |
COMMENT = $$Used to issue an HTTP POST request to Helius.$$
sql: |
SELECT live.udf_api(
'POST',
CASE
WHEN NETWORK = 'devnet' THEN
concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}')
ELSE
concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}')
END,
{},
BODY,
'_FSC_SYS/HELIUS'
) as response
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'POST',
CASE
WHEN NETWORK = 'devnet' THEN
concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}')
ELSE
concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}')
END,
{'fsc-quantum-execution-mode': 'async'},
BODY,
'_FSC_SYS/HELIUS',
TRUE
)
{%- else -%}
live.udf_api(
'POST',
CASE
WHEN NETWORK = 'devnet' THEN
concat('https://api-devnet.helius.xyz', PATH, '?api-key={API_KEY}')
ELSE
concat('https://api.helius.xyz', PATH, '?api-key={API_KEY}')
END,
{},
BODY,
'_FSC_SYS/HELIUS'
)
{%- endif %}
as response

- name: {{ schema -}}.rpc
- name: {{ schema_name }}.rpc
signature:
- [NETWORK, STRING, The network 'devnet' or 'mainnet']
- [METHOD, STRING, The RPC method to call]
@@ -59,17 +97,36 @@
options: |
COMMENT = $$Used to issue an RPC call to Helius.$$
sql: |
SELECT live.udf_api(
'POST',
CASE
WHEN NETWORK = 'devnet' THEN
'https://devnet.helius-rpc.com?api-key={API_KEY}'
ELSE
'https://rpc.helius.xyz?api-key={API_KEY}'
END,
{},
{'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS},
'_FSC_SYS/HELIUS'
) as response
SELECT
{% set v2_exists = is_udf_api_v2_compatible() %}
{% if v2_exists -%}
live.udf_api_v2(
'POST',
CASE
WHEN NETWORK = 'devnet' THEN
'https://devnet.helius-rpc.com?api-key={API_KEY}'
ELSE
'https://mainnet.helius-rpc.com?api-key={API_KEY}'
END,
{'fsc-quantum-execution-mode': 'async'},
{'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS},
'_FSC_SYS/HELIUS',
TRUE
)
{%- else -%}
live.udf_api(
'POST',
CASE
WHEN NETWORK = 'devnet' THEN
'https://devnet.helius-rpc.com?api-key={API_KEY}'
ELSE
'https://mainnet.helius-rpc.com?api-key={API_KEY}'
END,
{},
{'id': 1,'jsonrpc': '2.0','method': METHOD,'params': PARAMS},
'_FSC_SYS/HELIUS'
)
{%- endif %}
as response

{% endmacro %}
{% endmacro %}

macros/marketplace/nftscan/README.md (new file, 36 lines)
@@ -0,0 +1,36 @@
# NFTScan API Integration

NFTScan is a professional NFT data infrastructure platform providing comprehensive NFT APIs for accessing NFT metadata, transactions, and market data across multiple blockchains.

## Setup

1. Get your NFTScan API key from the [NFTScan Developer Portal](https://developer.nftscan.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/NFTSCAN`

3. Deploy the NFTScan marketplace functions:
```bash
dbt run --models nftscan__ nftscan_utils__nftscan_utils
```

## Functions

### `nftscan.get(path, query_args)`
Make GET requests to NFTScan API endpoints.

## Examples

```sql
-- Get NFT collection statistics
SELECT nftscan.get('/api/v2/statistics/collection/eth/0x...', {});

-- Get NFTs owned by an address
SELECT nftscan.get('/api/v2/account/own/eth/0x...', {'show_attribute': 'true', 'limit': 100});

-- Get NFT transaction history
SELECT nftscan.get('/api/v2/transactions/account/eth/0x...', {'event_type': 'Sale', 'limit': 50});
```

## API Documentation

- [NFTScan API Documentation](https://developer.nftscan.com/)
macros/marketplace/opensea/README.md (new file, 39 lines)
@@ -0,0 +1,39 @@
# OpenSea API Integration

OpenSea is the world's largest NFT marketplace, providing APIs for accessing NFT collections, listings, sales data, and marketplace activities.

## Setup

1. Get your OpenSea API key from the [OpenSea Developer Portal](https://docs.opensea.io/reference/api-keys)

2. Store the API key in Snowflake secrets under `_FSC_SYS/OPENSEA`

3. Deploy the OpenSea marketplace functions:
```bash
dbt run --models opensea__ opensea_utils__opensea_utils
```

## Functions

### `opensea.get(path, query_args)`
Make GET requests to OpenSea API endpoints.

### `opensea.post(path, body)`
Make POST requests to OpenSea API endpoints.

## Examples

```sql
-- Get NFT collection stats
SELECT opensea.get('/api/v2/collections/boredapeyachtclub/stats', {});

-- Get NFT listings
SELECT opensea.get('/api/v2/orders/ethereum/seaport/listings', {'limit': 20});

-- Get collection events
SELECT opensea.get('/api/v2/events/collection/boredapeyachtclub', {'event_type': 'sale'});
```

## API Documentation

- [OpenSea API Documentation](https://docs.opensea.io/reference/api-overview)
macros/marketplace/playgrounds/README.md (new file, 39 lines)
@@ -0,0 +1,39 @@
# Playgrounds API Integration

Playgrounds provides gaming and entertainment data APIs with access to game statistics, player data, and gaming platform analytics.

## Setup

1. Get your Playgrounds API key from the [Playgrounds Developer Portal](https://playgrounds.com/developers)

2. Store the API key in Snowflake secrets under `_FSC_SYS/PLAYGROUNDS`

3. Deploy the Playgrounds marketplace functions:
```bash
dbt run --models playgrounds__ playgrounds_utils__playgrounds_utils
```

## Functions

### `playgrounds.get(path, query_args)`
Make GET requests to Playgrounds API endpoints.

### `playgrounds.post(path, body)`
Make POST requests to Playgrounds API endpoints.

## Examples

```sql
-- Get game statistics
SELECT playgrounds.get('/api/v1/games/stats', {'game_id': 'fortnite'});

-- Get player rankings
SELECT playgrounds.get('/api/v1/leaderboards', {'game': 'valorant', 'region': 'na'});

-- Get tournament data
SELECT playgrounds.get('/api/v1/tournaments', {'status': 'active', 'limit': 50});
```

## API Documentation

- [Playgrounds API Documentation](https://docs.playgrounds.com/)
macros/marketplace/quicknode/README.md (new file, 44 lines)
@@ -0,0 +1,44 @@
# QuickNode API Integration

QuickNode provides high-performance blockchain infrastructure with RPC endpoints and enhanced APIs for Ethereum, Polygon, Solana, and other networks.

## Setup

1. Get your QuickNode endpoint and API key from the [QuickNode Dashboard](https://dashboard.quicknode.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/QUICKNODE`

3. Deploy the QuickNode marketplace functions:
```bash
dbt run --models quicknode__ quicknode_utils__quicknode_utils
```

## Functions

### `quicknode.get(path, query_args)`
Make GET requests to QuickNode API endpoints.

### `quicknode.post(path, body)`
Make POST requests to QuickNode API endpoints.

## Examples

```sql
-- Get the latest block number
SELECT quicknode.post('/rpc', {
    'jsonrpc': '2.0',
    'method': 'eth_blockNumber',
    'params': [],
    'id': 1
});

-- Get NFT metadata
SELECT quicknode.get('/nft/v1/ethereum/nft/0x.../1', {});

-- Get token transfers
SELECT quicknode.get('/token/v1/ethereum/transfers', {'address': '0x...', 'limit': 100});
```

## API Documentation

- [QuickNode API Documentation](https://www.quicknode.com/docs/)
macros/marketplace/reservoir/README.md (new file, 39 lines)
@@ -0,0 +1,39 @@
# Reservoir API Integration

Reservoir provides comprehensive NFT data infrastructure with APIs for accessing real-time NFT market data, collections, sales, and aggregated marketplace information.

## Setup

1. Get your Reservoir API key from the [Reservoir Dashboard](https://reservoir.tools/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/RESERVOIR`

3. Deploy the Reservoir marketplace functions:
```bash
dbt run --models reservoir__ reservoir_utils__reservoir_utils
```

## Functions

### `reservoir.get(path, query_args)`
Make GET requests to Reservoir API endpoints.

### `reservoir.post(path, body)`
Make POST requests to Reservoir API endpoints.

## Examples

```sql
-- Get collection floor prices
SELECT reservoir.get('/collections/v7', {'id': '0x...', 'includeTopBid': 'true'});

-- Get recent sales
SELECT reservoir.get('/sales/v6', {'collection': '0x...', 'limit': 100});

-- Get token details
SELECT reservoir.get('/tokens/v7', {'collection': '0x...', 'tokenId': '1234'});
```

## API Documentation

- [Reservoir API Documentation](https://docs.reservoir.tools/)
@@ -17,7 +17,7 @@
'GET',
concat('https://api.reservoir.tools', PATH, '?', utils.udf_object_to_url_query_string(QUERY_ARGS)),
{'x-api-key': '{API_KEY}'},
{},
NULL,
'_FSC_SYS/RESERVOIR'
) as response

@@ -39,4 +39,4 @@
'_FSC_SYS/RESERVOIR'
) as response

{% endmacro %}
{% endmacro %}

macros/marketplace/slack/README.md (new file, 294 lines)
@@ -0,0 +1,294 @@
# Slack Integration for Livequery

A straightforward Slack integration that uses secure vault-stored credentials. You construct the payload according to Slack's API spec, and Livequery delivers it using credentials stored in the vault.

## Prerequisites & Setup

### Option 1: Webhook Mode (Simpler, No Threading)

**When to use:** Simple notifications without threading support.

**Setup Steps:**
1. Go to [Slack Apps](https://api.slack.com/apps) and create a new app
2. Choose "From scratch" and select your workspace
3. Go to "Incoming Webhooks" and toggle "Activate Incoming Webhooks" to On
4. Click "Add New Webhook to Workspace"
5. Select the channel and click "Allow"
6. Copy the webhook URL (starts with `https://hooks.slack.com/services/...`)
7. Store the webhook URL in the vault under a secret name (e.g., 'alerts', 'notifications')
8. Use `slack_utils.post_webhook(secret_name, payload)`

**Limitations:**
- ❌ No threading support (cannot use `slack.post_reply()`)
- ❌ Cannot send to different channels dynamically
- ✅ Simple setup, no bot permissions needed

### Option 2: Web API Mode (Full Features + Threading)

**When to use:** You need threading support, multiple channels, or advanced features.

**Setup Steps:**
1. Go to [Slack Apps](https://api.slack.com/apps) and create a new app
2. Choose "From scratch" and select your workspace
3. Go to "OAuth & Permissions" in the sidebar
4. Under "Scopes" → "Bot Token Scopes", add these permissions:
   - `chat:write` - Send messages
   - `channels:read` - Access public channel information
   - `groups:read` - Access private channel information (if needed)
5. Click "Install to Workspace" at the top
6. Click "Allow" to grant permissions
7. Copy the "Bot User OAuth Token" (starts with `xoxb-...`)
8. Store the bot token in the vault (Livequery handles this automatically)
9. **Important:** Invite the bot to your channel:
   - Go to your Slack channel
   - Type `/invite @YourBotName` (replace with your bot's name)
   - Or go to channel settings → Integrations → Add apps → Select your bot
10. Get the channel ID:
    - Right-click your channel name → "Copy Link"
    - Extract the ID from the URL: `https://yourworkspace.slack.com/archives/C087GJQ1ZHQ` → `C087GJQ1ZHQ`
11. Use `slack.post_message(channel, payload)` and `slack.post_reply()` for threading

**Features:**
- ✅ Threading support with `slack.post_reply()`
- ✅ Send to any channel the bot is invited to
- ✅ More control and flexibility
- ❌ Requires bot setup and channel invitations

## Quick Start

### Basic Webhook Message
```sql
-- Send a simple message via webhook
SELECT slack_utils.post_webhook(
    'alerts', -- Secret name in vault
    {
        'text': 'Hello from Livequery!',
        'username': 'Data Bot'
    }
);
```

### Web API Message
```sql
-- Send a message to a channel
SELECT slack.post_message(
    'C087GJQ1ZHQ', -- Channel ID
    {
        'text': 'Pipeline completed!',
        'blocks': [
            {
                'type': 'header',
                'text': {
                    'type': 'plain_text',
                    'text': ':white_check_mark: Pipeline Success'
                }
            }
        ]
    }
);
```

### Threading Example
```sql
-- First, send the main message
WITH main_message AS (
    SELECT slack.post_message(
        'C087GJQ1ZHQ',
        {'text': 'Pipeline failed with 3 errors. Details in thread...'}
    ) as response
)
-- Then send a threaded reply
SELECT slack.post_reply(
    'C087GJQ1ZHQ',
    main_message.response:data:ts::STRING, -- Use the timestamp from the main message
    {'text': 'Error 1: Database connection timeout'}
) as thread_response
FROM main_message;
```
|
||||
## Functions Reference

### `slack_utils.post_webhook(secret_name, payload)`

Send messages via Slack Incoming Webhooks using a vault-stored webhook URL.

**Parameters:**

- `secret_name` - Name of the webhook secret stored in the vault (e.g., 'alerts', 'notifications')
- `payload` - JSON object following the [Slack webhook format](https://api.slack.com/messaging/webhooks)

**Example:**

```sql
SELECT slack_utils.post_webhook(
    'notifications',
    {
        'text': 'dbt run completed successfully',
        'username': 'dbt Bot',
        'icon_emoji': ':white_check_mark:'
    }
);
```

### `slack.post_message(channel, payload)`

Send messages via the Slack Web API (chat.postMessage) using a vault-stored bot token.

**Parameters:**

- `channel` - Channel ID (e.g. 'C087GJQ1ZHQ'); the Web API expects IDs rather than `#channel` names
- `payload` - JSON object following the [Slack chat.postMessage format](https://api.slack.com/methods/chat.postMessage)

**Example:**

```sql
SELECT slack.post_message(
    'C087GJQ1ZHQ',
    {
        'text': 'Model update complete',
        'attachments': [
            {
                'color': 'good',
                'title': 'Success',
                'fields': [
                    {'title': 'Models', 'value': '15', 'short': true},
                    {'title': 'Duration', 'value': '2m 30s', 'short': true}
                ]
            }
        ]
    }
);
```

### `slack.post_reply(channel, thread_ts, payload)`

Send threaded replies via the Slack Web API using a vault-stored bot token.

**Parameters:**

- `channel` - Channel ID
- `thread_ts` - Parent message timestamp for threading
- `payload` - JSON object following the Slack chat.postMessage format

**Example:**

```sql
SELECT slack.post_reply(
    'C087GJQ1ZHQ',
    '1698765432.123456',  -- Parent message timestamp
    {'text': 'Additional details in this thread'}
);
```
### `slack.webhook_send(secret_name, payload)`

Alias for `slack_utils.post_webhook()` - sends webhook messages using a vault-stored URL.

**Parameters:**

- `secret_name` - Name of the webhook secret stored in the vault
- `payload` - JSON object following the Slack webhook format
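Since the alias simply forwards to `slack_utils.post_webhook()`, a minimal call looks like this (payload shape as in the webhook examples above):

```sql
-- Same behavior as slack_utils.post_webhook()
SELECT slack.webhook_send(
    'alerts',
    {'text': 'Deploy finished'}
);
```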
### Validation Functions

- `slack_utils.validate_webhook_url(url)` - Check if a webhook URL format is valid
- `slack_utils.validate_bot_token(token)` - Check if a bot token format is valid
- `slack_utils.validate_channel(channel)` - Check if a channel ID format is valid
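These are handy as pre-flight checks before wiring up secrets. A short sketch (the argument values below are placeholders, not real credentials):

```sql
-- All three validators return BOOLEAN
SELECT
    slack_utils.validate_webhook_url('https://hooks.slack.com/services/T000/B000/XXXXXXXX') AS url_ok,
    slack_utils.validate_bot_token('xoxb-000000000000-placeholder') AS token_ok,
    slack_utils.validate_channel('C087GJQ1ZHQ') AS channel_ok;
```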
## Vault Configuration

### Webhook Secrets

Store your webhook URLs in the vault with meaningful names:

- `alerts` - For critical alerts
- `notifications` - For general notifications
- `marketing` - For marketing team updates
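If your deployment exposes the `secrets` schema used elsewhere in this repo, registering a webhook secret might look like the sketch below. The function and the object shape are assumptions based on this repo's secrets tests, not a documented contract:

```sql
-- Hypothetical: store a webhook URL under the name 'alerts'
-- (secrets.udf_create_secret and the object key are assumptions)
SELECT secrets.udf_create_secret(
    'alerts',
    {'webhook_url': 'https://hooks.slack.com/services/...'}
);
```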
### Bot Token

The bot token is automatically managed by Livequery and stored securely in the vault. You don't need to provide it in function calls.

## Testing Without Spamming Slack

### Built-in Tests

The integration includes comprehensive tests that verify functionality without hitting real Slack channels.

### Manual Testing Options

#### 1. Test with httpbin.org (Recommended for Development)

```sql
-- Test webhook functionality without hitting Slack
-- (Note: point the 'test-httpbin' secret at httpbin.org instead of a real Slack webhook)
SELECT slack_utils.post_webhook(
    'test-httpbin',  -- Create a test secret pointing to httpbin.org
    {'text': 'Test message', 'username': 'Test Bot'}
);
```

#### 2. Test Workspace (Real Slack Testing)

Create a dedicated test workspace or use a private test channel:

- Store test webhook URLs in the vault with names like `test-alerts`
- Use test channel IDs for `post_message()` calls
- Set up separate vault secrets for testing vs production
## Advanced Usage

### Rich Message Formatting

```sql
-- Advanced Block Kit message
SELECT slack.post_message(
    'C087GJQ1ZHQ',
    {
        'text': 'Data Pipeline Report',
        'blocks': [
            {
                'type': 'header',
                'text': {
                    'type': 'plain_text',
                    'text': '📊 Daily Data Pipeline Report'
                }
            },
            {
                'type': 'section',
                'fields': [
                    {'type': 'mrkdwn', 'text': '*Environment:*\nProduction'},
                    {'type': 'mrkdwn', 'text': '*Models Run:*\n25'},
                    {'type': 'mrkdwn', 'text': '*Duration:*\n12m 34s'},
                    {'type': 'mrkdwn', 'text': '*Status:*\n✅ Success'}
                ]
            },
            {
                'type': 'actions',
                'elements': [
                    {
                        'type': 'button',
                        'text': {'type': 'plain_text', 'text': 'View Logs'},
                        'url': 'https://your-logs-url.com'
                    }
                ]
            }
        ]
    }
);
```

### Error Handling

```sql
-- Check the response for errors
WITH slack_result AS (
    SELECT slack_utils.post_webhook(
        'alerts',
        {'text': 'Test message'}
    ) as response
)
SELECT
    response:ok::BOOLEAN as success,
    response:error::STRING as error_message,
    CASE
        WHEN response:ok::BOOLEAN THEN 'Message sent successfully'
        ELSE 'Failed: ' || response:error::STRING
    END as status
FROM slack_result;
```
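Web API calls also surface the HTTP layer. The envelope sketched below (`status_code` plus a `data` payload) follows the shapes asserted in this repo's `udf_api` tests; Slack's own `ok`/`error` fields sit inside `data`:

```sql
-- Inspect both the HTTP envelope and Slack's own ok/error fields
WITH api_result AS (
    SELECT slack.post_message('C087GJQ1ZHQ', {'text': 'ping'}) as response
)
SELECT
    response:status_code::INT as http_status,
    response:data:ok::BOOLEAN as slack_ok,
    response:data:error::STRING as slack_error
FROM api_result;
```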
## How It Works

1. **Secure credential storage** - Webhook URLs and bot tokens are stored in Livequery's vault
2. **You construct the payload** - Use Slack's official API documentation to build your JSON
3. **Livequery delivers it** - We handle authentication and HTTP requests to Slack
4. **Get the response** - Standard Slack API response with success/error info

## Slack API Documentation

- [Webhook Format](https://api.slack.com/messaging/webhooks) - For webhook messages
- [chat.postMessage](https://api.slack.com/methods/chat.postMessage) - For Web API messages
- [Block Kit](https://api.slack.com/block-kit) - For rich interactive messages
- [Message Formatting](https://api.slack.com/reference/surfaces/formatting) - Text formatting guide

That's it! Secure, simple Slack integration with vault-managed credentials.
90
macros/marketplace/slack/messaging_udfs.yaml.sql
Normal file
90
macros/marketplace/slack/messaging_udfs.yaml.sql
Normal file
@ -0,0 +1,90 @@
{% macro config_slack_messaging_udfs(schema_name = "slack", utils_schema_name = "slack_utils") -%}
{#
    This macro is used to generate API calls to Slack API endpoints
#}

{# Slack Webhook Messages #}
- name: {{ schema_name }}.webhook_send
  signature:
    - [WEBHOOK_SECRET_NAME, STRING, "Name of webhook secret in vault (e.g., 'alerts', 'notifications')"]
    - [PAYLOAD, OBJECT, Complete Slack message payload according to Slack API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = 'Send a message to Slack via webhook [API docs: Webhooks](https://api.slack.com/messaging/webhooks)'
  sql: |
    SELECT slack_utils.post_webhook(
        WEBHOOK_SECRET_NAME,
        PAYLOAD
    ) as response

{# Slack Web API Messages #}
- name: {{ schema_name }}.post_message
  signature:
    - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
    - [BOT_SECRET_NAME, STRING, "Name of bot token secret in vault (optional, default: 'intelligence')"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = 'Send a message to Slack via Web API with custom bot token [API docs: chat.postMessage](https://api.slack.com/methods/chat.postMessage)'
  sql: |
    SELECT slack_utils.post_message(
        CHANNEL,
        PAYLOAD,
        COALESCE(BOT_SECRET_NAME, 'intelligence')
    ) as response

- name: {{ schema_name }}.post_message
  signature:
    - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = 'Send a message to Slack via Web API [API docs: chat.postMessage](https://api.slack.com/methods/chat.postMessage)'
  sql: |
    SELECT {{ schema_name }}.post_message(
        CHANNEL,
        PAYLOAD,
        'intelligence'
    ) as response

- name: {{ schema_name }}.post_reply
  signature:
    - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"]
    - [THREAD_TS, STRING, Parent message timestamp for threading]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
    - [BOT_SECRET_NAME, STRING, "Name of bot token secret in vault (optional, default: 'intelligence')"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = 'Send a threaded reply to Slack via Web API with custom bot token [API docs: chat.postMessage](https://api.slack.com/methods/chat.postMessage)'
  sql: |
    SELECT slack_utils.post_reply(
        CHANNEL,
        THREAD_TS,
        PAYLOAD,
        COALESCE(BOT_SECRET_NAME, 'intelligence')
    ) as response

- name: {{ schema_name }}.post_reply
  signature:
    - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"]
    - [THREAD_TS, STRING, Parent message timestamp for threading]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = 'Send a threaded reply to Slack via Web API [API docs: chat.postMessage](https://api.slack.com/methods/chat.postMessage)'
  sql: |
    SELECT {{ schema_name }}.post_reply(
        CHANNEL,
        THREAD_TS,
        PAYLOAD,
        'intelligence'
    ) as response

{% endmacro %}
220
macros/marketplace/slack/utils_udfs.yaml.sql
Normal file
220
macros/marketplace/slack/utils_udfs.yaml.sql
Normal file
@ -0,0 +1,220 @@
{% macro config_slack_utils_udfs(schema_name = "slack_utils", utils_schema_name = "slack_utils") -%}
{#
    This macro is used to generate API calls to Slack API endpoints
#}
- name: {{ schema_name }}.post_webhook
  signature:
    - [WEBHOOK_SECRET_NAME, STRING, "Name of webhook secret in vault (e.g., 'alerts', 'notifications')"]
    - [PAYLOAD, OBJECT, Complete Slack message payload according to Slack API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a message to Slack via webhook. User provides secret name for webhook URL stored in vault.$$
  sql: |
    SELECT CASE
      WHEN WEBHOOK_SECRET_NAME IS NULL OR WEBHOOK_SECRET_NAME = '' THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'webhook_secret_name is required')
      WHEN PAYLOAD IS NULL THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'payload is required')
      ELSE
        {% set v2_exists = is_udf_api_v2_compatible() %}
        {% if v2_exists -%}
        live.udf_api_v2(
            'POST',
            '{WEBHOOK_URL}',
            OBJECT_CONSTRUCT('Content-Type', 'application/json'),
            PAYLOAD,
            IFF(_utils.udf_whoami() <> CURRENT_USER(),
                '_FSC_SYS/SLACK/' || WEBHOOK_SECRET_NAME,
                'Vault/prod/data_platform/slack/' || WEBHOOK_SECRET_NAME),
            TRUE
        )
        {%- else -%}
        live.udf_api(
            'POST',
            '{WEBHOOK_URL}',
            OBJECT_CONSTRUCT('Content-Type', 'application/json'),
            PAYLOAD,
            IFF(_utils.udf_whoami() <> CURRENT_USER(),
                '_FSC_SYS/SLACK/' || WEBHOOK_SECRET_NAME,
                'Vault/prod/data_platform/slack/' || WEBHOOK_SECRET_NAME)
        )
        {%- endif %}
    END as response

- name: {{ schema_name }}.post_message
  signature:
    - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
    - [BOT_SECRET_NAME, STRING, "Name of bot token secret in vault (optional, default: 'intelligence')"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a message to Slack via Web API chat.postMessage with custom bot token. User provides complete payload according to Slack API spec.$$
  sql: |
    SELECT CASE
      WHEN CHANNEL IS NULL OR CHANNEL = '' THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'channel is required')
      WHEN PAYLOAD IS NULL THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'payload is required')
      ELSE
        {% set v2_exists = is_udf_api_v2_compatible() %}
        {% if v2_exists -%}
        live.udf_api_v2(
            'POST',
            'https://slack.com/api/chat.postMessage',
            OBJECT_CONSTRUCT(
                'Authorization', 'Bearer {BOT_TOKEN}',
                'Content-Type', 'application/json'
            ),
            OBJECT_INSERT(PAYLOAD, 'channel', CHANNEL),
            IFF(_utils.udf_whoami() <> CURRENT_USER(),
                '_FSC_SYS/SLACK/' || COALESCE(BOT_SECRET_NAME, 'intelligence'),
                'Vault/prod/data_platform/slack/' || COALESCE(BOT_SECRET_NAME, 'intelligence')),
            TRUE
        )
        {%- else -%}
        live.udf_api(
            'POST',
            'https://slack.com/api/chat.postMessage',
            OBJECT_CONSTRUCT(
                'Authorization', 'Bearer {BOT_TOKEN}',
                'Content-Type', 'application/json'
            ),
            OBJECT_INSERT(PAYLOAD, 'channel', CHANNEL),
            IFF(_utils.udf_whoami() <> CURRENT_USER(),
                '_FSC_SYS/SLACK/' || COALESCE(BOT_SECRET_NAME, 'intelligence'),
                'Vault/prod/data_platform/slack/' || COALESCE(BOT_SECRET_NAME, 'intelligence'))
        )
        {%- endif %}
    END as response

- name: {{ schema_name }}.post_message
  signature:
    - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a message to Slack via Web API chat.postMessage. User provides complete payload according to Slack API spec.$$
  sql: |
    SELECT {{ schema_name }}.post_message(
        CHANNEL,
        PAYLOAD,
        'intelligence'
    ) as response

- name: {{ schema_name }}.post_reply
  signature:
    - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"]
    - [THREAD_TS, STRING, Parent message timestamp for threading]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
    - [BOT_SECRET_NAME, STRING, "Name of bot token secret in vault (optional, default: 'intelligence')"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a threaded reply to Slack via Web API with custom bot token. User provides complete payload according to Slack API spec.$$
  sql: |
    SELECT CASE
      WHEN CHANNEL IS NULL OR CHANNEL = '' THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'channel is required')
      WHEN THREAD_TS IS NULL OR THREAD_TS = '' THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'thread_ts is required')
      WHEN PAYLOAD IS NULL THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'payload is required')
      ELSE
        {% set v2_exists = is_udf_api_v2_compatible() %}
        {% if v2_exists -%}
        live.udf_api_v2(
            'POST',
            'https://slack.com/api/chat.postMessage',
            OBJECT_CONSTRUCT(
                'Authorization', 'Bearer {BOT_TOKEN}',
                'Content-Type', 'application/json'
            ),
            OBJECT_INSERT(
                OBJECT_INSERT(PAYLOAD, 'channel', CHANNEL),
                'thread_ts', THREAD_TS
            ),
            IFF(_utils.udf_whoami() <> CURRENT_USER(),
                '_FSC_SYS/SLACK/' || COALESCE(BOT_SECRET_NAME, 'intelligence'),
                'Vault/prod/data_platform/slack/' || COALESCE(BOT_SECRET_NAME, 'intelligence')),
            TRUE
        )
        {%- else -%}
        live.udf_api(
            'POST',
            'https://slack.com/api/chat.postMessage',
            OBJECT_CONSTRUCT(
                'Authorization', 'Bearer {BOT_TOKEN}',
                'Content-Type', 'application/json'
            ),
            OBJECT_INSERT(
                OBJECT_INSERT(PAYLOAD, 'channel', CHANNEL),
                'thread_ts', THREAD_TS
            ),
            IFF(_utils.udf_whoami() <> CURRENT_USER(),
                '_FSC_SYS/SLACK/' || COALESCE(BOT_SECRET_NAME, 'intelligence'),
                'Vault/prod/data_platform/slack/' || COALESCE(BOT_SECRET_NAME, 'intelligence'))
        )
        {%- endif %}
    END as response

- name: {{ schema_name }}.post_reply
  signature:
    - [CHANNEL, STRING, "Slack channel ID (e.g. 'C1234567890')"]
    - [THREAD_TS, STRING, Parent message timestamp for threading]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a threaded reply to Slack via Web API. User provides complete payload according to Slack API spec.$$
  sql: |
    SELECT {{ schema_name }}.post_reply(
        CHANNEL,
        THREAD_TS,
        PAYLOAD,
        'intelligence'
    ) as response

- name: {{ schema_name }}.validate_webhook_url
  signature:
    - [WEBHOOK_URL, STRING, Webhook URL to validate]
  return_type:
    - "BOOLEAN"
  options: |
    COMMENT = $$Validate if a string is a proper Slack webhook URL format.$$
  sql: |
    SELECT WEBHOOK_URL IS NOT NULL
      AND STARTSWITH(WEBHOOK_URL, 'https://hooks.slack.com/services/')
      AND LENGTH(WEBHOOK_URL) > 50

- name: {{ schema_name }}.validate_bot_token
  signature:
    - [BOT_TOKEN, STRING, Bot token to validate]
  return_type:
    - "BOOLEAN"
  options: |
    COMMENT = $$Validate if a string is a proper Slack bot token format.$$
  sql: |
    SELECT BOT_TOKEN IS NOT NULL
      AND STARTSWITH(BOT_TOKEN, 'xoxb-')
      AND LENGTH(BOT_TOKEN) > 20

- name: {{ schema_name }}.validate_channel
  signature:
    - [CHANNEL, STRING, "Channel ID to validate"]
  return_type:
    - "BOOLEAN"
  options: |
    COMMENT = $$Validate if a string is a proper Slack channel ID format (API requires IDs, not names).$$
  sql: |
    SELECT CHANNEL IS NOT NULL
      AND LENGTH(CHANNEL) > 0
      AND (
        STARTSWITH(CHANNEL, 'C') OR  -- Public channel ID
        STARTSWITH(CHANNEL, 'D') OR  -- Direct message ID
        STARTSWITH(CHANNEL, 'G')     -- Private channel/group ID
      )

{% endmacro %}
45
macros/marketplace/snapshot/README.md
Normal file
45
macros/marketplace/snapshot/README.md
Normal file
@ -0,0 +1,45 @@
# Snapshot API Integration

Snapshot is a decentralized voting platform that provides APIs for accessing DAO governance data, proposals, votes, and community participation metrics.

## Setup

1. Get your Snapshot API key from [Snapshot Hub](https://snapshot.org/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/SNAPSHOT`

3. Deploy the Snapshot marketplace functions:

```bash
dbt run --models snapshot__ snapshot_utils__snapshot_utils
```

## Functions

### `snapshot.get(path, query_args)`

Make GET requests to Snapshot API endpoints.
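The examples further down all use `snapshot.post`; a GET call follows the same pattern with query arguments instead of a body. The path and arguments below are illustrative placeholders, not documented endpoints:

```sql
-- Hypothetical REST-style call; consult the Snapshot docs for real paths
SELECT snapshot.get('/api/spaces', {'limit': 10});
```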
### `snapshot.post(path, body)`

Make POST requests to Snapshot GraphQL API endpoints.

## Examples

```sql
-- Get DAO spaces
SELECT snapshot.post('/graphql', {
    'query': 'query { spaces(first: 20, orderBy: "created", orderDirection: desc) { id name } }'
});

-- Get proposals for a space
SELECT snapshot.post('/graphql', {
    'query': 'query { proposals(first: 10, where: {space: "uniswap"}) { id title state } }'
});

-- Get votes for a proposal
SELECT snapshot.post('/graphql', {
    'query': 'query { votes(first: 100, where: {proposal: "proposal_id"}) { voter choice } }'
});
```

## API Documentation

- [Snapshot API Documentation](https://docs.snapshot.org/)
36
macros/marketplace/solscan/README.md
Normal file
36
macros/marketplace/solscan/README.md
Normal file
@ -0,0 +1,36 @@
# Solscan API Integration

Solscan is a leading Solana blockchain explorer providing comprehensive APIs for accessing Solana transaction data, account information, and network statistics.

## Setup

1. Get your Solscan API key from [Solscan API Portal](https://pro-api.solscan.io/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/SOLSCAN`

3. Deploy the Solscan marketplace functions:

```bash
dbt run --models solscan__ solscan_utils__solscan_utils
```

## Functions

### `solscan.get(path, query_args)`

Make GET requests to Solscan API endpoints.

## Examples

```sql
-- Get account information
SELECT solscan.get('/account', {'address': 'account_address'});

-- Get transaction details
SELECT solscan.get('/transaction', {'signature': 'transaction_signature'});

-- Get token information
SELECT solscan.get('/token/meta', {'token': 'token_address'});
```

## API Documentation

- [Solscan API Documentation](https://docs.solscan.io/)
36
macros/marketplace/stakingrewards/README.md
Normal file
36
macros/marketplace/stakingrewards/README.md
Normal file
@ -0,0 +1,36 @@
# Staking Rewards API Integration

Staking Rewards provides comprehensive data on cryptocurrency staking opportunities, validator performance, and yield farming across multiple blockchain networks.

## Setup

1. Get your Staking Rewards API key from [Staking Rewards API Portal](https://stakingrewards.com/api)

2. Store the API key in Snowflake secrets under `_FSC_SYS/STAKINGREWARDS`

3. Deploy the Staking Rewards marketplace functions:

```bash
dbt run --models stakingrewards__ stakingrewards_utils__stakingrewards_utils
```

## Functions

### `stakingrewards.get(path, query_args)`

Make GET requests to Staking Rewards API endpoints.

## Examples

```sql
-- Get staking assets
SELECT stakingrewards.get('/assets', {'limit': 100});

-- Get validator information
SELECT stakingrewards.get('/validators', {'asset': 'ethereum', 'limit': 50});

-- Get staking rewards data
SELECT stakingrewards.get('/rewards', {'asset': 'solana', 'timeframe': '30d'});
```

## API Documentation

- [Staking Rewards API Documentation](https://docs.stakingrewards.com/)
39
macros/marketplace/strangelove/README.md
Normal file
39
macros/marketplace/strangelove/README.md
Normal file
@ -0,0 +1,39 @@
# Strangelove API Integration

Strangelove provides blockchain infrastructure and data services for Cosmos ecosystem blockchains, offering APIs for accessing cross-chain data and IBC information.

## Setup

1. Get your Strangelove API key from [Strangelove Ventures](https://strangelove.ventures/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/STRANGELOVE`

3. Deploy the Strangelove marketplace functions:

```bash
dbt run --models strangelove__ strangelove_utils__strangelove_utils
```

## Functions

### `strangelove.get(path, query_args)`

Make GET requests to Strangelove API endpoints.

### `strangelove.post(path, body)`

Make POST requests to Strangelove API endpoints.

## Examples

```sql
-- Get Cosmos network data
SELECT strangelove.get('/api/v1/chains', {});

-- Get IBC transfer data
SELECT strangelove.get('/api/v1/ibc/transfers', {'chain': 'cosmoshub', 'limit': 100});

-- Get validator information
SELECT strangelove.get('/api/v1/validators', {'chain': 'osmosis'});
```
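The examples above are all GETs; `strangelove.post` follows the same pattern with a request body instead of query args. The endpoint and body below are illustrative placeholders:

```sql
-- Hypothetical POST; consult the Strangelove docs for real endpoints
SELECT strangelove.post('/api/v1/query', {'chain': 'cosmoshub', 'limit': 10});
```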
## API Documentation

- [Strangelove API Documentation](https://docs.strangelove.ventures/)

@ -42,4 +42,4 @@
        '_FSC_SYS/STRANGELOVE'
    ) as response

{% endmacro %}
{% endmacro %}
45
macros/marketplace/subquery/README.md
Normal file
45
macros/marketplace/subquery/README.md
Normal file
@ -0,0 +1,45 @@
# SubQuery API Integration

SubQuery provides decentralized data indexing infrastructure for Web3, offering APIs to access indexed blockchain data across multiple networks including Polkadot, Ethereum, and Cosmos.

## Setup

1. Get your SubQuery API key from [SubQuery Managed Service](https://managedservice.subquery.network/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/SUBQUERY`

3. Deploy the SubQuery marketplace functions:

```bash
dbt run --models subquery__ subquery_utils__subquery_utils
```

## Functions

### `subquery.get(path, query_args)`

Make GET requests to SubQuery API endpoints.

### `subquery.post(path, body)`

Make POST requests to SubQuery GraphQL API endpoints.

## Examples

```sql
-- Get indexed project data
SELECT subquery.post('/graphql', {
    'query': 'query { transfers(first: 10) { id from to value } }'
});

-- Get block information
SELECT subquery.post('/graphql', {
    'query': 'query { blocks(first: 5, orderBy: NUMBER_DESC) { id number timestamp } }'
});

-- Get account transactions
SELECT subquery.post('/graphql', {
    'query': 'query { accounts(filter: {id: {equalTo: "address"}}) { id transactions { nodes { id } } } }'
});
```

## API Documentation

- [SubQuery API Documentation](https://academy.subquery.network/)

@ -19,4 +19,4 @@
        QUERY
    ) as response

{% endmacro %}
{% endmacro %}
36
macros/marketplace/topshot/README.md
Normal file
36
macros/marketplace/topshot/README.md
Normal file
@ -0,0 +1,36 @@
# NBA Top Shot API Integration

NBA Top Shot is Dapper Labs' basketball NFT platform featuring officially licensed NBA highlights as digital collectible Moments.

## Setup

1. Get your NBA Top Shot API key from [Dapper Labs Developer Portal](https://developers.dapperlabs.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/TOPSHOT`

3. Deploy the Top Shot marketplace functions:

```bash
dbt run --models topshot__ topshot_utils__topshot_utils
```

## Functions

### `topshot.get(path, query_args)`

Make GET requests to NBA Top Shot API endpoints.

## Examples

```sql
-- Get Top Shot collections
SELECT topshot.get('/collections', {});

-- Get moment details
SELECT topshot.get('/moments/12345', {});

-- Get marketplace listings
SELECT topshot.get('/marketplace/listings', {'player': 'lebron-james', 'limit': 50});
```

## API Documentation

- [NBA Top Shot API Documentation](https://developers.dapperlabs.com/)
39
macros/marketplace/transpose/README.md
Normal file
39
macros/marketplace/transpose/README.md
Normal file
@ -0,0 +1,39 @@
# Transpose API Integration

Transpose provides real-time blockchain data infrastructure with APIs for accessing NFT data, DeFi protocols, and on-chain analytics across multiple networks.

## Setup

1. Get your Transpose API key from [Transpose Dashboard](https://dashboard.transpose.io/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/TRANSPOSE`

3. Deploy the Transpose marketplace functions:

```bash
dbt run --models transpose__ transpose_utils__transpose_utils
```

## Functions

### `transpose.get(path, query_args)`

Make GET requests to Transpose API endpoints.

### `transpose.post(path, body)`

Make POST requests to Transpose API endpoints.

## Examples

```sql
-- Get NFT collection data
SELECT transpose.get('/v0/ethereum/collections/0x...', {});

-- Get account NFTs
SELECT transpose.get('/v0/ethereum/nfts/by-owner', {'owner_address': '0x...', 'limit': 100});

-- Get token transfers
SELECT transpose.get('/v0/ethereum/transfers', {'contract_address': '0x...', 'limit': 50});
```
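All of the examples above use `transpose.get`; `transpose.post` takes a request body instead of query args. The endpoint and body below are illustrative placeholders only:

```sql
-- Hypothetical POST; consult the Transpose docs for real endpoints
SELECT transpose.post('/v0/ethereum/query', {'limit': 10});
```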
## API Documentation

- [Transpose API Documentation](https://docs.transpose.io/)
36
macros/marketplace/zapper/README.md
Normal file
36
macros/marketplace/zapper/README.md
Normal file
@ -0,0 +1,36 @@
# Zapper API Integration

Zapper provides DeFi portfolio tracking and analytics with APIs for accessing wallet balances, DeFi positions, transaction history, and yield farming opportunities.

## Setup

1. Get your Zapper API key from [Zapper API Portal](https://api.zapper.fi/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/ZAPPER`

3. Deploy the Zapper marketplace functions:

```bash
dbt run --models zapper__ zapper_utils__zapper_utils
```

## Functions

### `zapper.get(path, query_args)`

Make GET requests to Zapper API endpoints.

## Examples

```sql
-- Get wallet token balances
SELECT zapper.get('/v2/balances', {'addresses[]': '0x...', 'networks[]': 'ethereum'});

-- Get DeFi protocol positions
SELECT zapper.get('/v2/apps/tokens', {'groupId': 'uniswap-v2', 'addresses[]': '0x...'});

-- Get transaction history
SELECT zapper.get('/v2/transactions', {'address': '0x...', 'network': 'ethereum'});
```

## API Documentation

- [Zapper API Documentation](https://docs.zapper.fi/)
45
macros/marketplace/zettablock/README.md
Normal file
45
macros/marketplace/zettablock/README.md
Normal file
@ -0,0 +1,45 @@
# ZettaBlock API Integration

ZettaBlock provides real-time blockchain data infrastructure with GraphQL APIs for accessing multi-chain data, analytics, and custom data indexing.

## Setup

1. Get your ZettaBlock API key from [ZettaBlock Console](https://console.zettablock.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/ZETTABLOCK`

3. Deploy the ZettaBlock marketplace functions:

```bash
dbt run --models zettablock__ zettablock_utils__zettablock_utils
```

## Functions

### `zettablock.get(path, query_args)`

Make GET requests to ZettaBlock API endpoints.

### `zettablock.post(path, body)`

Make POST requests to ZettaBlock GraphQL API endpoints.

## Examples

```sql
-- Get blockchain data via GraphQL
SELECT zettablock.post('/graphql', {
    'query': 'query { ethereum { transactions(first: 10) { hash value gasPrice } } }'
});

-- Get token information
SELECT zettablock.post('/graphql', {
    'query': 'query { tokens(network: "ethereum", first: 20) { address symbol name } }'
});

-- Get DeFi protocol data
SELECT zettablock.post('/graphql', {
    'query': 'query { defi { protocols(first: 10) { name tvl volume24h } } }'
});
```

## API Documentation

- [ZettaBlock API Documentation](https://docs.zettablock.com/)
@ -33,3 +33,33 @@ test AS
    {%- endif -%}
{%- endfor -%}
{%- endmacro -%}

{% macro base_test_udf_without_context(model, udf, args, assertions) %}
{#
    Generates a test for a UDF without setting LIVEQUERY_CONTEXT.
#}
{%- set call -%}
    {{ target.database }}.{{ udf }}({{ args }})
{%- endset -%}
,
test AS
(
    SELECT
        '{{ udf }}' AS test_name
        ,[{{ args }}] as parameters
        ,{{ call }} AS result
)
{% for assertion in assertions %}
SELECT
    test_name,
    parameters,
    result,
    $${{ assertion }}$$ AS assertion,
    $$SELECT {{ call ~ "\n" }};$$ AS sql
FROM test
WHERE NOT {{ assertion }}
{%- if not loop.last %}
UNION ALL
{%- endif -%}
{%- endfor -%}
{%- endmacro -%}
31
macros/tests/udtfs.sql
Normal file
31
macros/tests/udtfs.sql
Normal file
@ -0,0 +1,31 @@
{% macro base_test_udtf(model, udf, args, assertions) %}
{#
    Generates a test for a User-Defined Table Function (UDTF).
    Unlike scalar UDFs, UDTFs return a table of results.
#}
{%- set call -%}
    SELECT * FROM TABLE({{ udf }}({{ args }}))
{%- endset -%}

WITH test AS
(
    SELECT
        '{{ udf }}' AS test_name
        ,[{{ args }}] as parameters
        ,t.*
    FROM TABLE({{ udf }}({{ args }})) t
)

{% for assertion in assertions %}
SELECT
    test_name,
    parameters,
    $${{ assertion }}$$ AS assertion,
    $${{ call }}$$ AS sql
FROM test
WHERE NOT {{ assertion }}
{%- if not loop.last %}
UNION ALL
{%- endif -%}
{%- endfor -%}
{% endmacro %}
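The key difference the macro captures is that a UDTF must be invoked through the TABLE() wrapper rather than as a scalar expression. For reference, a direct invocation looks like this (the UDTF name and argument are placeholders):

```sql
-- How a UDTF is invoked directly (my_schema.my_udtf is a placeholder)
SELECT * FROM TABLE(my_schema.my_udtf('some_arg'));
```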
67
macros/utils/udf_utils/render.sql
Normal file
67
macros/utils/udf_utils/render.sql
Normal file
@ -0,0 +1,67 @@
{% macro get_rendered_model(package_name, model_name, schema, blockchain, network) %}
{#
    This macro retrieves and renders a specified model from the graph.

    Args:
        package_name (str): The name of the package containing the model.
        model_name (str): The name of the model to be rendered.
        schema (str): The schema to be used.
        blockchain (str): The blockchain to be used.
        network (str): The network to be used.

    Returns:
        str: The rendered SQL of the specified model.
#}
{% if execute %}
    {{ log("=== Starting get_rendered_model ===", info=True) }}
    {# Use a list to store the node to avoid scope issues #}
    {%- set nodes = [] -%}
    {{ log("Looking for node: " ~ package_name ~ "." ~ model_name, info=True) }}
    {%- for node in graph.nodes.values() -%}
        {%- if node.package_name == package_name and node.name == model_name -%}
            {{ log("Found target node: " ~ node.unique_id, info=True) }}
            {%- do nodes.append(node) -%}
        {%- endif -%}
    {%- endfor -%}

    {%- if nodes | length == 0 -%}
        {{ log("No target node found!", info=True) }}
        {{ return('') }}
    {%- endif -%}

    {%- set target_node = nodes[0] -%}
    {{ log("Processing node: " ~ target_node.unique_id, info=True) }}
    {{ log("Dependencies:\n\t\t" ~ (target_node.depends_on.nodes | pprint).replace("\n", "\n\t\t"), info=True) }}

    {# First render all dependency CTEs #}
    {%- set ctes = [] -%}
    {%- for dep_id in target_node.depends_on.nodes -%}
        {{ log("Processing dependency: " ~ dep_id, info=True) }}
        {%- set dep_node = graph.nodes[dep_id] -%}

        {%- set rendered_sql = render(dep_node.raw_code) | trim -%}

        {%- if rendered_sql -%}
            {%- set cte_sql -%}
    __dbt__cte__{{ dep_node.name }} AS (
        {{ rendered_sql }}
    )
            {%- endset -%}
            {%- do ctes.append(cte_sql) -%}
        {%- endif -%}
    {%- endfor -%}

    {{ log("Number of CTEs generated: " ~ ctes | length, info=True) }}

    {# Combine CTEs with main query #}
    {%- set final_sql -%}
    WITH {{ ctes | join(',\n\n') }}

    {{ render(target_node.raw_code) }}
    {%- endset -%}

    {{ log("=== End get_rendered_model ===\n\n", info=True) }}

    {{ return(final_sql) }}
{% endif %}
{% endmacro %}
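A hedged sketch of how this macro might be invoked from another dbt macro or operation; the package and model names below are placeholders, not values from this repo:

```sql
{# Hypothetical invocation; names are placeholders #}
{% set rendered_sql = get_rendered_model('livequery_models', 'some_model', 'silver', 'ethereum', 'mainnet') %}
{{ log(rendered_sql, info=True) }}
```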
@ -2,6 +2,57 @@ version: 2
|
||||
models:
|
||||
- name: live
|
||||
columns:
|
||||
- name: udf_api_batched
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test__live_udf_api_batched_post_data_object
|
||||
args: |
|
||||
'GET',
|
||||
'https://httpbin.org/get',
|
||||
{'Content-Type': 'application/json'},
|
||||
{'param1': 'value1', 'param2': 'value2'},
|
||||
''
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args:param1 = 'value1' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args:param2 = 'value2' ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_batched_post_jsonrpc_ethereum_batch
|
||||
args: |
|
||||
'POST',
|
||||
'https://ethereum-rpc.publicnode.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
[
|
||||
{'jsonrpc': '2.0', 'id': 1, 'method': 'eth_blockNumber', 'params': []},
|
||||
{'jsonrpc': '2.0', 'id': 2, 'method': 'eth_chainId', 'params': []}
|
||||
],
|
||||
''
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:result is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:id = 2 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:result = '0x1' ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_batched_post_jsonrpc_solana
|
||||
args: |
|
||||
'POST',
|
||||
'https://api.mainnet-beta.solana.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
{
|
||||
'jsonrpc': '2.0',
|
||||
'id': 1,
|
||||
'method': 'getVersion'
|
||||
},
|
||||
''
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.result is not null ELSE true END
|
||||
- name: udf_api
|
||||
tests:
|
||||
- test_udf:
|
||||
@ -9,19 +60,369 @@ models:
|
||||
args: |
|
||||
'https://httpbin.org/post', {'foo': 'bar'}
|
||||
assertions:
|
||||
- result:data.json is not null
|
||||
- result:data.json = OBJECT_CONSTRUCT('foo', 'bar')
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json = OBJECT_CONSTRUCT('foo', 'bar') ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_post_data_array
|
||||
args: |
|
||||
'https://httpbin.org/post', ['foo', 'bar']
|
||||
assertions:
|
||||
- result:data.json is not null
|
||||
- result:data.json = ARRAY_CONSTRUCT('foo', 'bar')
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json = ARRAY_CONSTRUCT('foo', 'bar') ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_post_data_string
|
||||
args: |
|
||||
'https://httpbin.org/post', 'foo'::VARIANT
|
||||
assertions:
|
||||
- result:data.json is not null
|
||||
- result:data.json = 'foo'
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json = 'foo' ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_get_method
|
||||
args: |
|
||||
'https://httpbin.org/get'
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.url = 'https://httpbin.org/get' ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_get_with_params
|
||||
args: |
|
||||
'GET', 'https://httpbin.org/get', {'Content-Type': 'application/json'}, {'param1': 'value1', 'param2': 'value2'}
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args:param1 = 'value1' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args:param2 = 'value2' ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_post_batch_jsonrpc
|
||||
args: |
|
||||
'https://httpbin.org/post', {
|
||||
'jsonrpc': '2.0',
|
||||
'id': 1,
|
||||
'method': 'batch',
|
||||
'params': [
|
||||
{'id': 1, 'method': 'method1', 'params': {'param1': 'value1'}},
|
||||
{'id': 2, 'method': 'method2', 'params': {'param2': 'value2'}}
|
||||
]
|
||||
}
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:method = 'batch' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:params is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:params[0]:id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:params[1]:id = 2 ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_post_jsonrpc_solana
|
||||
args: |
|
||||
'POST',
|
||||
'https://api.mainnet-beta.solana.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
{
|
||||
'jsonrpc': '2.0',
|
||||
'id': 1,
|
||||
'method': 'getVersion'
|
||||
},
|
||||
''
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.result is not null ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_post_jsonrpc_solana_batch
|
||||
args: |
|
||||
'POST',
|
||||
'https://api.mainnet-beta.solana.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
[
|
||||
{'jsonrpc': '2.0', 'id': 1, 'method': 'getVersion'},
|
||||
{'jsonrpc': '2.0', 'id': 2, 'method': 'getVersion'}
|
||||
],
|
||||
''
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:result is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:id = 2 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:result is not null ELSE true END
|
||||
|
||||
- test_udf:
|
||||
name: test__live_udf_api_post_jsonrpc_ethereum_batch
|
||||
args: |
|
||||
'POST',
|
||||
'https://ethereum-rpc.publicnode.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
[
|
||||
{'jsonrpc': '2.0', 'id': 1, 'method': 'eth_blockNumber', 'params': []},
|
||||
{'jsonrpc': '2.0', 'id': 2, 'method': 'eth_chainId', 'params': []}
|
||||
],
|
||||
''
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:result is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:id = 2 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:result = '0x1' ELSE true END
|
||||
|
||||
- name: udf_api_v2
|
||||
tests:
|
||||
# Convenience overloads (always sync)
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_post_data_object_sync
|
||||
args: |
|
||||
'https://httpbin.org/post', {'foo': 'bar'}
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json = OBJECT_CONSTRUCT('foo', 'bar') ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_post_data_array_sync
|
||||
args: |
|
||||
'https://httpbin.org/post', ['foo', 'bar']
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json = ARRAY_CONSTRUCT('foo', 'bar') ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_post_data_string_sync
|
||||
args: |
|
||||
'https://httpbin.org/post', 'foo'::VARIANT
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json = 'foo' ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_get_method_sync
|
||||
args: |
|
||||
'https://httpbin.org/get'
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.url = 'https://httpbin.org/get' ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_post_batch_jsonrpc_sync
|
||||
args: |
|
||||
'https://httpbin.org/post', {
|
||||
'jsonrpc': '2.0',
|
||||
'id': 1,
|
||||
'method': 'batch',
|
||||
'params': [
|
||||
{'id': 1, 'method': 'method1', 'params': {'param1': 'value1'}},
|
||||
{'id': 2, 'method': 'method2', 'params': {'param2': 'value2'}}
|
||||
]
|
||||
}
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:method = 'batch' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:params is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:params[0]:id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.json:params[1]:id = 2 ELSE true END
|
||||
|
||||
# Full signature tests - SYNC mode
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_get_with_params_sync
|
||||
args: |
|
||||
'GET', 'https://httpbin.org/get', {'Content-Type': 'application/json'}, {'param1': 'value1', 'param2': 'value2'}, FALSE
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args:param1 = 'value1' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args:param2 = 'value2' ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_post_jsonrpc_solana_sync
|
||||
args: |
|
||||
'POST',
|
||||
'https://api.mainnet-beta.solana.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
{
|
||||
'jsonrpc': '2.0',
|
||||
'id': 1,
|
||||
'method': 'getVersion'
|
||||
},
|
||||
FALSE
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.result is not null ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_post_jsonrpc_solana_batch_sync
|
||||
args: |
|
||||
'POST',
|
||||
'https://api.mainnet-beta.solana.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
[
|
||||
{'jsonrpc': '2.0', 'id': 1, 'method': 'getVersion'},
|
||||
{'jsonrpc': '2.0', 'id': 2, 'method': 'getVersion'}
|
||||
],
|
||||
FALSE
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:result is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:id = 2 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:result is not null ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_post_jsonrpc_ethereum_batch_sync
|
||||
args: |
|
||||
'POST',
|
||||
'https://ethereum-rpc.publicnode.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
[
|
||||
{'jsonrpc': '2.0', 'id': 1, 'method': 'eth_blockNumber', 'params': []},
|
||||
{'jsonrpc': '2.0', 'id': 2, 'method': 'eth_chainId', 'params': []}
|
||||
],
|
||||
FALSE
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:result is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:id = 2 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:result = '0x1' ELSE true END
|
||||
|
||||
# Full signature tests - ASYNC mode
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_get_with_params_async
|
||||
args: |
|
||||
'GET', 'https://httpbin.org/get', {'Content-Type': 'application/json'}, {'param1': 'value1', 'param2': 'value2'}, TRUE
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args:param1 = 'value1' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.args:param2 = 'value2' ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_post_jsonrpc_solana_async
|
||||
args: |
|
||||
'POST',
|
||||
'https://api.mainnet-beta.solana.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
{
|
||||
'jsonrpc': '2.0',
|
||||
'id': 1,
|
||||
'method': 'getVersion'
|
||||
},
|
||||
TRUE
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data.result is not null ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_post_jsonrpc_solana_batch_async
|
||||
args: |
|
||||
'POST',
|
||||
'https://api.mainnet-beta.solana.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
[
|
||||
{'jsonrpc': '2.0', 'id': 1, 'method': 'getVersion'},
|
||||
{'jsonrpc': '2.0', 'id': 2, 'method': 'getVersion'}
|
||||
],
|
||||
TRUE
|
||||
assertions:
|
||||
- result:status_code IN (200, 502, 503)
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:id = 1 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[0]:result is not null ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:jsonrpc = '2.0' ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:id = 2 ELSE true END
|
||||
- CASE WHEN result:status_code = 200 THEN result:data[1]:result is not null ELSE true END
|
||||
- test_udf:
|
||||
name: test__live_udf_api_v2_post_jsonrpc_ethereum_batch_async
|
||||
args: |
|
||||
'POST',
|
||||
'https://ethereum-rpc.publicnode.com',
|
||||
{'Content-Type': 'application/json'},
|
||||
[
|
||||
{'jsonrpc': '2.0', 'id': 1, 'method': 'eth_blockNumber', 'params': []},
|
||||
                {'jsonrpc': '2.0', 'id': 2, 'method': 'eth_chainId', 'params': []}
                ],
                TRUE
              assertions:
                - result:status_code IN (200, 502, 503)
                - CASE WHEN result:status_code = 200 THEN result:data[0]:jsonrpc = '2.0' ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data[0]:id = 1 ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data[0]:result is not null ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data[1]:jsonrpc = '2.0' ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data[1]:id = 2 ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data[1]:result = '0x1' ELSE true END

          # Explicit is_async boolean parameter tests
          - test_udf:
              name: test__live_udf_api_v2_get_with_headers_is_async_true
              args: |
                'https://httpbin.org/get', {'Content-Type': 'application/json'}, '', true
              assertions:
                - result:status_code IN (200, 502, 503)
                - CASE WHEN result:status_code = 200 THEN result:data.url = 'https://httpbin.org/get' ELSE true END
          - test_udf:
              name: test__live_udf_api_v2_get_with_headers_is_async_false
              args: |
                'https://httpbin.org/get', {'Content-Type': 'application/json'}, '', false
              assertions:
                - result:status_code IN (200, 502, 503)
                - CASE WHEN result:status_code = 200 THEN result:data.url = 'https://httpbin.org/get' ELSE true END
          - test_udf:
              name: test__live_udf_api_v2_full_signature_is_async_true
              args: |
                'GET', 'https://httpbin.org/get', {'Content-Type': 'application/json'}, {'param1': 'value1'}, '', true
              assertions:
                - result:status_code IN (200, 502, 503)
                - CASE WHEN result:status_code = 200 THEN result:data.args is not null ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data.args:param1 = 'value1' ELSE true END
          - test_udf:
              name: test__live_udf_api_v2_full_signature_is_async_false
              args: |
                'GET', 'https://httpbin.org/get', {'Content-Type': 'application/json'}, {'param1': 'value1'}, '', false
              assertions:
                - result:status_code IN (200, 502, 503)
                - CASE WHEN result:status_code = 200 THEN result:data.args is not null ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data.args:param1 = 'value1' ELSE true END
          - test_udf:
              name: test__live_udf_api_v2_post_jsonrpc_is_async_true
              args: |
                'POST',
                'https://api.mainnet-beta.solana.com',
                {'Content-Type': 'application/json'},
                {
                  'jsonrpc': '2.0',
                  'id': 1,
                  'method': 'getVersion'
                },
                '',
                true
              assertions:
                - result:status_code IN (200, 502, 503)
                - CASE WHEN result:status_code = 200 THEN result:data.jsonrpc = '2.0' ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data.id = 1 ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data.result is not null ELSE true END
          - test_udf:
              name: test__live_udf_api_v2_post_jsonrpc_is_async_false
              args: |
                'POST',
                'https://api.mainnet-beta.solana.com',
                {'Content-Type': 'application/json'},
                {
                  'jsonrpc': '2.0',
                  'id': 1,
                  'method': 'getVersion'
                },
                '',
                false
              assertions:
                - result:status_code IN (200, 502, 503)
                - CASE WHEN result:status_code = 200 THEN result:data.jsonrpc = '2.0' ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data.id = 1 ELSE true END
                - CASE WHEN result:status_code = 200 THEN result:data.result is not null ELSE true END
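For orientation, each test entry above corresponds to a direct UDF invocation whose VARIANT result the assertions probe. A minimal sketch, assuming the function under test is exposed as live.udf_api_v2 (name inferred from the test prefix; the schema, name, and signature are assumptions, not confirmed by this diff):

    -- Hypothetical call mirroring test__live_udf_api_v2_full_signature_is_async_true.
    SELECT live.udf_api_v2(
             'GET',                                -- method
             'https://httpbin.org/get',            -- url
             {'Content-Type': 'application/json'}, -- headers
             {'param1': 'value1'},                 -- query parameters
             '',                                   -- secret name (empty = none)
             true                                  -- is_async
           ) AS result;
    -- The assertions then tolerate transient upstream failures, e.g.:
    -- result:status_code IN (200, 502, 503)
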
@@ -1,45 +0,0 @@
version: 2
models:
  - name: secrets
    columns:
      - name: udf_register_secret
        tests:
          - test_udf:
              name: test_secrets__udf_register_secret
              args: >
                'test', 'test'
              assertions:
                - result = {'request_id':'test','success':false}
      - name: udf_get_secret
        tests:
          - test_udf:
              name: test_secrets__udf_get_secret
              args: >
                'test'
              assertions:
                - >
                  result::OBJECT = {'key': 'value'}
      - name: udf_get_secrets
        tests:
          - test_udf:
              name: test_secrets__udf_get_secrets
              args: ''
              assertions:
                - >
                  result = {'test': {'key': 'value'}}
      - name: udf_create_secret
        tests:
          - test_udf:
              name: test_secrets__udf_create_secret
              args: >
                'test', {'key': 'value'}
              assertions:
                - result = 200
      - name: udf_delete_secret
        tests:
          - test_udf:
              name: test_secrets__udf_delete_secret
              args: >
                'test_delete'
              assertions:
                - result = 200

@@ -5,6 +5,7 @@ models:
      - name: udf_json_rpc_call
        tests:
          - test_udf:
              name: test__utils_udf_json_rpc_call
              args: "'foo', [], 1"
              assertions:
                - >
@@ -17,182 +18,182 @@ models:
      - name: udf_urlencode
        tests:
          - test_udf:
-              name: test_utils__udf_urlencode_dict_true_1
+              name: test__utils_udf_urlencode_dict_true_1
              args: >
                {'a':'b'}, TRUE
              assertions:
                - result = 'a=b'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_true_2
+              name: test__utils_udf_urlencode_dict_true_2
              args: >
                {'a':'a', 'b':'b'}, TRUE
              assertions:
                - result = 'a=a&b=b'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_true_space
+              name: test__utils_udf_urlencode_dict_true_space
              args: >
                {'space': 'abc 123'}, TRUE
              assertions:
                - result = 'space=abc+123'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_true_special
+              name: test__utils_udf_urlencode_dict_true_special
              args: >
                {'special!': ' !@#$,+"'}, TRUE
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_true_array
+              name: test__utils_udf_urlencode_dict_true_array
              args: >
                {'array': [0, 1, 2]}, TRUE
              assertions:
                - result = 'array=0&array=1&array=2'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_false_1
+              name: test__utils_udf_urlencode_dict_false_1
              args: >
                {'a':'b'}, FALSE
              assertions:
                - result = 'a=b'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_false_2
+              name: test__utils_udf_urlencode_dict_false_2
              args: >
                {'a':'b', 'b':'b'}, FALSE
              assertions:
                - result = 'a=b&b=b'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_false_space
+              name: test__utils_udf_urlencode_dict_false_space
              args: >
                {'space': 'abc 123'}, FALSE
              assertions:
                - result = 'space=abc+123'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_false_special
+              name: test__utils_udf_urlencode_dict_false_special
              args: >
                {'special!': ' !@#$,+"'}, FALSE
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_false_array
+              name: test__utils_udf_urlencode_dict_false_array
              args: >
                {'array': [0, 1, 2]}, FALSE
              assertions:
                - result = 'array=%5B0%2C+1%2C+2%5D'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_1
+              name: test__utils_udf_urlencode_dict_1
              args: >
                {'a':'b'}
              assertions:
                - result = 'a=b'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_2
+              name: test__utils_udf_urlencode_dict_2
              args: >
                {'a':'b', 'b':'b'}
              assertions:
                - result = 'a=b&b=b'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_space
+              name: test__utils_udf_urlencode_dict_space
              args: >
                {'space': 'abc 123'}
              assertions:
                - result = 'space=abc+123'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_special
+              name: test__utils_udf_urlencode_dict_special
              args: >
                {'special!': ' !@#$,+"'}
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
-              name: test_utils__udf_urlencode_dict_array
+              name: test__utils_udf_urlencode_dict_array
              args: >
                {'array': [0, 1, 2]}
              assertions:
                - result = 'array=%5B0%2C+1%2C+2%5D'
          # write tests but use arrays of arrays instead of dictionaries
          - test_udf:
-              name: test_utils__udf_urlencode_array_true_1
+              name: test__utils_udf_urlencode_array_true_1
              args: >
                [['a', 'b']], TRUE
              assertions:
                - result = 'a=b'
          - test_udf:
-              name: test_utils__udf_urlencode_array_true_2
+              name: test__utils_udf_urlencode_array_true_2
              args: >
                [['a', 'a'], ['b', 'b']], TRUE
              assertions:
                - result = 'a=a&b=b'
          - test_udf:
-              name: test_utils__udf_urlencode_array_true_space
+              name: test__utils_udf_urlencode_array_true_space
              args: >
                [['space', 'abc 123']], TRUE
              assertions:
                - result = 'space=abc+123'
          - test_udf:
-              name: test_utils__udf_urlencode_array_true_special
+              name: test__utils_udf_urlencode_array_true_special
              args: >
                [['special!', ' !@#$,+"']], TRUE
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
-              name: test_utils__udf_urlencode_array_true_array
+              name: test__utils_udf_urlencode_array_true_array
              args: >
                [['array', [0, 1, 2]]], TRUE
              assertions:
                - result = 'array=0&array=1&array=2'
          - test_udf:
-              name: test_utils__udf_urlencode_array_false_1
+              name: test__utils_udf_urlencode_array_false_1
              args: >
                [['a', 'b']], FALSE
              assertions:
                - result = 'a=b'
          - test_udf:
-              name: test_utils__udf_urlencode_array_false_2
+              name: test__utils_udf_urlencode_array_false_2
              args: >
                [['a', 'a'], ['b', 'b']], FALSE
              assertions:
                - result = 'a=a&b=b'
          - test_udf:
-              name: test_utils__udf_urlencode_array_false_space
+              name: test__utils_udf_urlencode_array_false_space
              args: >
                [['space', 'abc 123']], FALSE
              assertions:
                - result = 'space=abc+123'
          - test_udf:
-              name: test_utils__udf_urlencode_array_false_special
+              name: test__utils_udf_urlencode_array_false_special
              args: >
                [['special!', ' !@#$,+"']], FALSE
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
-              name: test_utils__udf_urlencode_array_false_array
+              name: test__utils_udf_urlencode_array_false_array
              args: >
                [['array', [0, 1, 2]]], FALSE
              assertions:
                - result = 'array=%5B0%2C+1%2C+2%5D'
          - test_udf:
-              name: test_utils__udf_urlencode_array_1
+              name: test__utils_udf_urlencode_array_1
              args: >
                [['a', 'b']]
              assertions:
                - result = 'a=b'
          - test_udf:
-              name: test_utils__udf_urlencode_array_2
+              name: test__utils_udf_urlencode_array_2
              args: >
                [['a', 'a'], ['b', 'b']]
              assertions:
                - result = 'a=a&b=b'
          - test_udf:
-              name: test_utils__udf_urlencode_array_space
+              name: test__utils_udf_urlencode_array_space
              args: >
                [['space', 'abc 123']]
              assertions:
                - result = 'space=abc+123'
          - test_udf:
-              name: test_utils__udf_urlencode_array_special
+              name: test__utils_udf_urlencode_array_special
              args: >
                [['special!', ' !@#$,+"']]
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
-              name: test_utils__udf_urlencode_array_array
+              name: test__utils_udf_urlencode_array_array
              args: >
                [['array', [0, 1, 2]]]
              assertions:
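The assertions above double as a contract for the encoder: with the boolean flag TRUE, array values expand into repeated keys (which appears to mirror the doseq flag of Python's urllib.parse.urlencode); with FALSE or no flag, arrays are percent-encoded literally. A minimal sketch of direct calls, assuming the UDF deploys as utils.udf_urlencode (the schema is loosely inferred from the test prefix and is an assumption); expected outputs are taken from the assertions:

    -- doseq-style expansion vs. literal encoding (function name assumed):
    SELECT utils.udf_urlencode({'array': [0, 1, 2]}, TRUE);   -- 'array=0&array=1&array=2'
    SELECT utils.udf_urlencode({'array': [0, 1, 2]}, FALSE);  -- 'array=%5B0%2C+1%2C+2%5D'
    -- Array-of-pairs input behaves like a dict; one-argument form defaults to FALSE behavior:
    SELECT utils.udf_urlencode([['space', 'abc 123']]);       -- 'space=abc+123'
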
@@ -200,35 +201,35 @@ models:
      - name: udf_int_to_binary
        tests:
          - test_udf:
-              name: test_utils__udf_int_to_binary
+              name: test__utils_udf_int_to_binary
              args: 123456789
              assertions:
                - result = '111010110111100110100010101'
      - name: udf_int_to_binary
        tests:
          - test_udf:
-              name: test_utils__udf_int_to_binary_large_number
+              name: test__utils_udf_int_to_binary_large_number
              args: "'123456789123456789123456789123456789123456789'"
              assertions:
                - result = '101100010010011011011100101001111010001001110011010111111101111101010111011001001101000001111110001010100001011011010000100000001000101111100010101'
      - name: udf_binary_to_int
        tests:
          - test_udf:
-              name: test_utils__udf_binary_to_int
+              name: test__utils_udf_binary_to_int
              args: '111010110111100110100010101'
              assertions:
                - result = '123456789'
      - name: udf_binary_to_int
        tests:
          - test_udf:
-              name: test_utils__udf_binary_to_int_large_number
+              name: test__utils_udf_binary_to_int_large_number
              args: "'110110110100110110100101110101100110100000101111100010101'"
              assertions:
                - result = '123456789123456789'
      - name: udf_evm_decode_log
        tests:
          - test_udf:
-              name: test_utils__udf_evm_decode_log
+              name: test__utils_udf_evm_decode_log
              args: >
                {
                  'anonymous': false,

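The two conversion UDFs round-trip, and oversized values travel as strings rather than integers. A short sketch, with the schema and names assumed from the test prefixes and the values taken from the assertions above:

    SELECT utils.udf_int_to_binary(123456789);                      -- '111010110111100110100010101'
    SELECT utils.udf_binary_to_int('111010110111100110100010101');  -- '123456789'
    -- Values beyond Snowflake's NUMBER(38,0) precision are passed and returned as strings:
    SELECT utils.udf_int_to_binary('123456789123456789123456789123456789123456789');
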
@@ -1,6 +1,7 @@
--- depends_on: {{ ref('live') }}
+-- depends_on: {{ ref('alchemy_utils__alchemy_utils') }}
+-- depends_on: {{ ref('live') }}

{%- set configs = [
    config_alchemy_nfts_udfs,
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}

@@ -1,6 +1,7 @@
--- depends_on: {{ ref('live') }}
+-- depends_on: {{ ref('alchemy_utils__alchemy_utils') }}
+-- depends_on: {{ ref('live') }}

{%- set configs = [
    config_alchemy_tokens_udfs,
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}

@@ -1,6 +1,7 @@
--- depends_on: {{ ref('live') }}
+-- depends_on: {{ ref('alchemy_utils__alchemy_utils') }}
+-- depends_on: {{ ref('live') }}

{%- set configs = [
    config_alchemy_transfers_udfs,
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}

@@ -8,7 +8,7 @@ models:
              name: test_apilayer__get_status_200
              args: >
                '/odds/sports'
-                , {}
+                , {'all': 'true'}
              assertions:
                - result:status_code = 200
                - result:error IS NULL

models/deploy/marketplace/claude/claude__.sql (new file)
@@ -0,0 +1,8 @@
-- depends_on: {{ ref('live') }}
-- depends_on: {{ ref('claude_utils__claude_utils') }}
{%- set configs = [
    config_claude_messages_udfs,
    config_claude_models_udfs,
    config_claude_messages_batch_udfs
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}

models/deploy/marketplace/claude/claude__.yml (new file)
@@ -0,0 +1,151 @@
version: 2
models:
  - name: claude__
    columns:
      - name: post_messages
        tests:
          - test_udf:
              name: test_claude__post_messages_only_one_message
              args: >
                [{'role': 'user', 'content': 'Hello, how are you?'}]
              assertions:
                - result:status_code = 200
                - result:error IS NULL
          - test_udf:
              name: test_claude__post_messages_with_model
              args: >
                'claude-3-5-sonnet-20241022',
                [{'role': 'user', 'content': 'Hello, how are you?'}],
                1024
              assertions:
                - result:status_code = 200
          - test_udf:
              name: test_claude__post_messages_with_all_params
              args: >
                'claude-3-5-sonnet-20241022',
                [{'role': 'user', 'content': 'Hello, how are you?'}],
                1024,
                0.5,
                10,
                0.95,
                'You are a helpful assistant.'
              assertions:
                - result:status_code = 200
                - result:error IS NULL

      - name: count_message_tokens
        tests:
          - test_udf:
              name: test_claude__count_message_tokens
              args: >
                'claude-3-5-sonnet-20241022',
                [{'role': 'user', 'content': 'Hello, how are you?'}]
              assertions:
                - result:status_code = 200
                - result:error IS NULL

      - name: list_models
        tests:
          - test_udf:
              name: test_claude__list_models
              assertions:
                - result:status_code = 200
                - result:error IS NULL

      - name: get_model
        tests:
          - test_udf:
              name: test_claude__get_model
              args: >
                'claude-3-5-sonnet-20241022'
              assertions:
                - result:status_code = 200
                - result:error IS NULL

      - name: post_messages_batch
        tests:
          - test_udf:
              name: test_claude__post_messages_batch
              args: >
                {
                  'requests': [
                    {
                      'custom_id': 'test_1',
                      'params': {
                        'model': 'claude-3-5-sonnet-20241022',
                        'max_tokens': 100,
                        'messages': [{'role': 'user', 'content': 'Say hello'}]
                      }
                    },
                    {
                      'custom_id': 'test_2',
                      'params': {
                        'model': 'claude-3-5-sonnet-20241022',
                        'max_tokens': 100,
                        'messages': [{'role': 'user', 'content': 'Say goodbye'}]
                      }
                    }
                  ]
                }
              assertions:
                - result:status_code = 200
                - result:error IS NULL
                - result:data:id IS NOT NULL
                - result:data:type = 'message_batch'

      - name: list_message_batches
        tests:
          - test_udf:
              name: test_claude__list_message_batches
              assertions:
                - result:status_code = 200
                - result:error IS NULL
                - result:data IS NOT NULL

      # Skip pagination tests that require valid batch IDs
      - name: list_message_batches_with_before
        tests:
          - test_udf:
              config:
                enabled: false
              name: test_claude__list_message_batches_with_before_disabled
              args: >
                null,
                5
              assertions:
                - result:status_code = 200

      - name: list_message_batches_with_after
        tests:
          - test_udf:
              config:
                enabled: false
              name: test_claude__list_message_batches_with_after_disabled
              args: >
                null,
                5
              assertions:
                - result:status_code = 200

      # Skip individual batch access tests that require valid batch IDs
      - name: get_message_batch
        tests:
          - test_udf:
              config:
                enabled: false
              name: test_claude__get_message_batch_disabled
              args: >
                'msgbatch_test'
              assertions:
                - result:status_code = 404

      - name: get_message_batch_results
        tests:
          - test_udf:
              config:
                enabled: false
              name: test_claude__get_message_batch_results_disabled
              args: >
                'msgbatch_test'
              assertions:
                - result:status_code = 404

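Once deployed, each column name above becomes a callable function. A minimal sketch, assuming the marketplace schema is claude (inferred from the model name claude__) and that the positional parameters follow the order seen in test_claude__post_messages_with_all_params; both are assumptions, not confirmed by this diff:

    -- Simplest form: a single user message (schema and function name assumed).
    SELECT claude.post_messages([{'role': 'user', 'content': 'Hello, how are you?'}]) AS result;
    -- Full form: model, messages, max_tokens, then (assumed) temperature, top_k, top_p, system prompt.
    SELECT claude.post_messages(
             'claude-3-5-sonnet-20241022',
             [{'role': 'user', 'content': 'Hello, how are you?'}],
             1024, 0.5, 10, 0.95, 'You are a helpful assistant.'
           ) AS result;
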
@@ -0,0 +1,5 @@
-- depends_on: {{ ref('live') }}
{%- set configs = [
    config_claude_utils_udfs,
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}

@@ -0,0 +1,18 @@
version: 2
models:
  - name: claude_utils__claude_utils
    columns:
      - name: post_api
        tests:
          - test_udf:
              name: test_claude_utils__post_api_status_200
              args: >
                '/v1/messages'
                , {
                  'model': 'claude-3-5-sonnet-20241022',
                  'max_tokens': 1024,
                  'messages': [{'role': 'user', 'content': 'Hello, how are you?'}]
                }
              assertions:
                - result:status_code = 200
                - result:error IS NULL

@@ -8,7 +8,7 @@ models:
              name: test_cmc__get_status_200
              args: >
                '/v2/cryptocurrency/ohlcv/historical'
-                , {'interval': 'hourly', 'time_period': 'hourly', 'time_start': 1691524740, 'time_end': 1691528400, 'id': '15478,15479'}
+                , {'interval': 'hourly', 'time_period': 'hourly', 'time_start': DATE_PART('EPOCH_SECOND', DATEADD('hour', -2, CURRENT_TIMESTAMP())), 'time_end': DATE_PART('EPOCH_SECOND', DATEADD('hour', -1, CURRENT_TIMESTAMP())), 'id': '15478,15479'}
              assertions:
                - result:status_code = 200
                - result:error IS NULL

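This change swaps fixed epoch timestamps for a rolling one-hour window ending an hour ago, so the scheduled run always queries recent data instead of a stale 2023 range. The window expressions are plain Snowflake and can be sanity-checked standalone:

    -- Evaluates the same bounds the test now passes as time_start / time_end.
    SELECT DATE_PART('EPOCH_SECOND', DATEADD('hour', -2, CURRENT_TIMESTAMP())) AS time_start,
           DATE_PART('EPOCH_SECOND', DATEADD('hour', -1, CURRENT_TIMESTAMP())) AS time_end;
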
@@ -7,7 +7,7 @@ models:
          - test_udf:
              name: test_dappradar__get_status_200
              args: >
-                '/4tsxo4vuhotaojtl/defi/chains'
+                '/v2/defi/chains'
                , {}
              assertions:
                - result:status_code = 200

@@ -7,7 +7,7 @@ models:
          - test_udf:
              name: test_defillama__get_status_200
              args: >
-                '/protocols'
+                '/categories'
                , {}
              assertions:
                - result:status_code = 200

@@ -7,13 +7,13 @@ models:
          - test_udf:
              name: test_footprint_utils__get_status_200_v2_protocol_info
              args: >
-                '/v2/protocol/info'
+                '/v3/protocol/getProtocolStatsHistory'
                , {
-                  'chain': 'Ethereum',
-                  'protocol_slug': 'the-sandbox',
-                  'limit': 10,
-                  'offset': 0
-                }
+                  'chain': 'Polygon',
+                  'protocol_slug': 'planet-ix',
+                  'start_time': '2023-07-01',
+                  'end_time': '2023-07-25'
+                }
              assertions:
                - result:status_code = 200
                - result:error IS NULL

Some files were not shown because too many files have changed in this diff.