mirror of https://github.com/FlipsideCrypto/livequery-models.git
synced 2026-02-06 10:56:46 +00:00

Update GitHub Actions workflow to reduce thread count and add extensive API integration documentation

- Changed thread count from 24 to 5 in GitHub Actions workflows for improved performance.
- Added comprehensive README files for various API integrations, including Alchemy, NBA All Day, API Layer, Binance, and more.
- Introduced new UDFs and UDTFs for the Groq and Slack API integrations, enhancing functionality and usability.
- Implemented tests for the new UDFs and UDTFs to ensure reliability and correctness.
- Updated existing UDF definitions and added new tests for enhanced coverage and robustness.
This commit is contained in:
parent a3b004d0cc
commit 629dfe077d
.github/workflows/dbt_udf_test.yml (vendored, 6 changed lines)

@@ -41,7 +41,7 @@ jobs:
     with:
       warehouse: ${{ vars.WAREHOUSE }}
       environment: prod
-      command: dbt test --selector test_udfs --threads 24
+      command: dbt test --selector test_udfs --threads 5

   dispatched:
     uses: ./.github/workflows/dbt.yml
@@ -50,7 +50,7 @@ jobs:
     with:
       warehouse: ${{ inputs.warehouse }}
       environment: ${{ inputs.environment }}
-      command: dbt test --selector test_udfs --threads 24
+      command: dbt test --selector test_udfs --threads 5

   pull_request:
     uses: ./.github/workflows/dbt.yml
@@ -59,4 +59,4 @@ jobs:
     with:
       warehouse: ${{ vars.WAREHOUSE }}
       environment: dev
-      command: dbt test --selector test_udfs --threads 24
+      command: dbt test --selector test_udfs --threads 5
macros/marketplace/alchemy/README.md (new file, 288 lines)

@@ -0,0 +1,288 @@
# Alchemy API Integration

Comprehensive blockchain data integration using Alchemy's APIs for NFTs, tokens, transfers, and RPC calls across multiple networks.

## Supported Networks

- **Ethereum** (`eth-mainnet`)
- **Polygon** (`polygon-mainnet`)
- **Arbitrum** (`arb-mainnet`)
- **Optimism** (`opt-mainnet`)
- **Base** (`base-mainnet`)
- **And more** - Check [Alchemy's documentation](https://docs.alchemy.com/reference/api-overview) for the latest supported networks

## Setup

1. Get your Alchemy API key from the [Alchemy Dashboard](https://dashboard.alchemy.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/ALCHEMY`

3. Deploy the Alchemy marketplace functions:

```bash
dbt run --models alchemy__ alchemy_utils__alchemy_utils
```

## Core Functions

### Utility Functions (`alchemy_utils` schema)

#### `alchemy_utils.nfts_get(network, path, query_args)`
Make GET requests to Alchemy NFT API endpoints.

#### `alchemy_utils.nfts_post(network, path, body)`
Make POST requests to Alchemy NFT API endpoints.

#### `alchemy_utils.rpc(network, method, params)`
Make RPC calls to blockchain networks via Alchemy.

### NFT Functions (`alchemy` schema)

#### `alchemy.get_nfts_for_owner(network, owner[, query_args])`
Get all NFTs owned by an address.

#### `alchemy.get_nft_metadata(network, contract_address, token_id)`
Get metadata for a specific NFT.

#### `alchemy.get_nfts_for_collection(network, contract_address[, query_args])`
Get all NFTs in a collection.

#### `alchemy.get_owners_for_nft(network, contract_address, token_id)`
Get all owners of a specific NFT.

### Token Functions

#### `alchemy.get_token_balances(network, owner[, contract_addresses])`
Get token balances for an address.

#### `alchemy.get_token_metadata(network, contract_address)`
Get metadata for a token contract.

### Transfer Functions

#### `alchemy.get_asset_transfers(network, query_args)`
Get asset transfer data with flexible filtering.

## Examples

### NFT Queries

#### Get NFTs for Owner
```sql
-- Get all NFTs owned by an address
SELECT alchemy.get_nfts_for_owner(
    'eth-mainnet',
    '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b'
);

-- With pagination and filtering
SELECT alchemy.get_nfts_for_owner(
    'eth-mainnet',
    '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b',
    {
        'pageSize': 100,
        'contractAddresses': ['0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D'] -- BAYC
    }
);
```
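The scalar functions return a single VARIANT, so row-level analysis usually starts by flattening the response with Snowflake's `LATERAL FLATTEN`. A minimal sketch, assuming the response carries the `ownedNfts` array shape documented by Alchemy:

```sql
-- Flatten the ownedNfts array into one row per NFT; the field names
-- (ownedNfts, contract.address, tokenId) assume Alchemy's documented shape.
WITH resp AS (
    SELECT alchemy.get_nfts_for_owner(
        'eth-mainnet',
        '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b'
    ) AS response
)
SELECT
    nft.value:contract:address::STRING AS contract_address,
    nft.value:tokenId::STRING          AS token_id
FROM resp,
LATERAL FLATTEN(input => resp.response:ownedNfts) nft;
```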
#### Get NFT Metadata
```sql
-- Get metadata for a specific NFT
SELECT alchemy.get_nft_metadata(
    'eth-mainnet',
    '0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D', -- BAYC contract
    '1234' -- Token ID
);
```

#### Get Collection NFTs
```sql
-- Get all NFTs in a collection
SELECT alchemy.get_nfts_for_collection(
    'eth-mainnet',
    '0x60E4d786628Fea6478F785A6d7e704777c86a7c6', -- MAYC
    {
        'pageSize': 50,
        'startToken': '0'
    }
);
```

### Token Queries

#### Get Token Balances
```sql
-- Get all token balances for an address
SELECT alchemy.get_token_balances(
    'eth-mainnet',
    '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b'
);

-- Get specific token balances
SELECT alchemy.get_token_balances(
    'eth-mainnet',
    '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b',
    ['0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48'] -- USDC
);
```

#### Get Token Metadata
```sql
-- Get token contract information
SELECT alchemy.get_token_metadata(
    'eth-mainnet',
    '0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48' -- USDC
);
```

### Transfer Analysis

#### Asset Transfers
```sql
-- Get recent transfers for an address
SELECT alchemy.get_asset_transfers(
    'eth-mainnet',
    {
        'fromAddress': '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b',
        'category': ['erc721', 'erc1155'],
        'maxCount': 100
    }
);

-- Get transfers for a specific contract
SELECT alchemy.get_asset_transfers(
    'eth-mainnet',
    {
        'contractAddresses': ['0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D'],
        'category': ['erc721'],
        'fromBlock': '0x12A05F200',
        'toBlock': 'latest'
    }
);
```

### RPC Calls

#### Direct Blockchain Queries
```sql
-- Get latest block number
SELECT alchemy_utils.rpc(
    'eth-mainnet',
    'eth_blockNumber',
    []
);

-- Get block by number
SELECT alchemy_utils.rpc(
    'eth-mainnet',
    'eth_getBlockByNumber',
    ['0x12A05F200', true]
);

-- Get transaction receipt
SELECT alchemy_utils.rpc(
    'eth-mainnet',
    'eth_getTransactionReceipt',
    ['0x1234567890abcdef...']
);
```

### Multi-Network Analysis

#### Compare NFT Holdings Across Networks
```sql
-- Get NFT holdings on Ethereum
WITH eth_nfts AS (
    SELECT 'ethereum' as network, alchemy.get_nfts_for_owner(
        'eth-mainnet',
        '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b'
    ) as nfts
),
-- Get NFT holdings on Polygon
polygon_nfts AS (
    SELECT 'polygon' as network, alchemy.get_nfts_for_owner(
        'polygon-mainnet',
        '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b'
    ) as nfts
)
SELECT network, nfts:totalCount::INTEGER as nft_count
FROM eth_nfts
UNION ALL
SELECT network, nfts:totalCount::INTEGER
FROM polygon_nfts;
```

### Advanced Analytics

#### Collection Stats Tracking
```sql
-- Track collection stats over time
WITH collection_data AS (
    SELECT alchemy.get_nfts_for_collection(
        'eth-mainnet',
        '0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D', -- BAYC
        {'pageSize': 1}
    ) as collection_info
)
SELECT
    collection_info:contract:name::STRING as collection_name,
    collection_info:contract:totalSupply::INTEGER as total_supply,
    CURRENT_TIMESTAMP as snapshot_time
FROM collection_data;
```

## Error Handling

Handle API errors and rate limits:

```sql
WITH api_response AS (
    SELECT alchemy.get_nfts_for_owner(
        'eth-mainnet',
        '0xinvalid-address'
    ) as response
)
SELECT
    CASE
        WHEN response:error IS NOT NULL THEN
            CONCAT('API Error: ', response:error:message::STRING)
        WHEN response:ownedNfts IS NOT NULL THEN
            CONCAT('Success: Found ', ARRAY_SIZE(response:ownedNfts), ' NFTs')
        ELSE
            'Unexpected response format'
    END as result
FROM api_response;
```

## Rate Limiting

The Alchemy API has the following rate limits:
- **Free tier**: 300 requests per second
- **Growth tier**: 660 requests per second
- **Scale tier**: Custom limits

The functions automatically handle rate limiting through Livequery's retry mechanisms.

## Best Practices

1. **Use pagination**: For large datasets, use `pageSize` and pagination tokens (see the sketch after this list)
2. **Filter requests**: Use `contractAddresses` to limit scope when possible
3. **Cache results**: Store frequently accessed data in tables
4. **Monitor usage**: Track API calls to stay within limits
5. **Network selection**: Choose the most relevant network for your use case
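A minimal pagination sketch, assuming the response exposes Alchemy's documented `pageKey` cursor; in practice you would keep requesting until `pageKey` comes back null:

```sql
-- Fetch page 1, then use its pageKey cursor to fetch page 2.
-- The pageKey field name assumes Alchemy's documented NFT API response.
WITH first_page AS (
    SELECT alchemy.get_nfts_for_owner(
        'eth-mainnet',
        '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b',
        {'pageSize': 100}
    ) AS response
)
SELECT alchemy.get_nfts_for_owner(
    'eth-mainnet',
    '0x742d35Cc6634C0532925a3b8D45C5f8B9a8Fb15b',
    {'pageSize': 100, 'pageKey': response:pageKey::STRING}
) AS second_page
FROM first_page
WHERE response:pageKey IS NOT NULL;
```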
## Supported Categories

For asset transfers, use these categories:
- `erc20` - ERC-20 token transfers
- `erc721` - NFT transfers
- `erc1155` - Multi-token standard transfers
- `internal` - Internal ETH transfers
- `external` - External ETH transfers

## API Documentation

- [Alchemy API Reference](https://docs.alchemy.com/reference/api-overview)
- [NFT API](https://docs.alchemy.com/reference/nft-api-quickstart)
- [Token API](https://docs.alchemy.com/reference/token-api-quickstart)
- [Enhanced API Methods](https://docs.alchemy.com/reference/enhanced-api-quickstart)
macros/marketplace/allday/README.md (new file, 36 lines)

@@ -0,0 +1,36 @@
# NBA All Day API Integration

NBA All Day is Dapper Labs' basketball NFT platform, offering officially licensed NBA Moments as digital collectibles.

## Setup

1. Get your NBA All Day API key from the [Dapper Labs developer portal](https://developers.dapperlabs.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/ALLDAY`

3. Deploy the All Day marketplace functions:

```bash
dbt run --models allday__ allday_utils__allday_utils
```

## Functions

### `allday.get(path, query_args)`
Make GET requests to NBA All Day API endpoints.

## Examples

```sql
-- Get NBA All Day collections
SELECT allday.get('/collections', {});

-- Get specific moment details
SELECT allday.get('/moments/12345', {});

-- Search for moments by player
SELECT allday.get('/moments', {'player_id': 'lebron-james'});
```

## API Documentation

- [NBA All Day API Documentation](https://developers.dapperlabs.com/)
macros/marketplace/apilayer/README.md (new file, 39 lines)

@@ -0,0 +1,39 @@
# API Layer Integration

API Layer provides a comprehensive suite of utility APIs, including currency conversion, geolocation, weather data, and more.

## Setup

1. Get your API Layer API key from the [API Layer Dashboard](https://apilayer.com/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/APILAYER`

3. Deploy the API Layer marketplace functions:

```bash
dbt run --models apilayer__ apilayer_utils__apilayer_utils
```

## Functions

### `apilayer.get(path, query_args)`
Make GET requests to API Layer API endpoints.

### `apilayer.post(path, body)`
Make POST requests to API Layer API endpoints.

## Examples

```sql
-- Get currency exchange rates
SELECT apilayer.get('/exchangerates_data/latest', {'base': 'USD', 'symbols': 'EUR,GBP,JPY'});

-- Get IP geolocation data
SELECT apilayer.get('/ip_api/check', {'ip': '8.8.8.8'});

-- Validate an email address
SELECT apilayer.get('/email_validation/check', {'email': 'test@example.com'});
```

## API Documentation

- [API Layer Documentation](https://apilayer.com/marketplace)
macros/marketplace/binance/README.md (new file, 39 lines)

@@ -0,0 +1,39 @@
# Binance API Integration

Binance is the world's largest cryptocurrency exchange by trading volume, providing access to spot trading, futures, and market data.

## Setup

1. Get your Binance API key from [Binance API Management](https://www.binance.com/en/my/settings/api-management)

2. Store the API key in Snowflake secrets under `_FSC_SYS/BINANCE`

3. Deploy the Binance marketplace functions:

```bash
dbt run --models binance__ binance_utils__binance_utils
```

## Functions

### `binance.get(path, query_args)`
Make GET requests to Binance API endpoints.

### `binance.post(path, body)`
Make POST requests to Binance API endpoints.

## Examples

```sql
-- Get current Bitcoin price
SELECT binance.get('/api/v3/ticker/price', {'symbol': 'BTCUSDT'});

-- Get 24hr ticker statistics
SELECT binance.get('/api/v3/ticker/24hr', {'symbol': 'ETHUSDT'});

-- Get order book depth
SELECT binance.get('/api/v3/depth', {'symbol': 'ADAUSDT', 'limit': 100});
```
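Responses come back as VARIANT, and Binance returns prices as strings; a small sketch casting the `price` field to a number for downstream arithmetic:

```sql
-- The {'symbol': ..., 'price': ...} shape follows Binance's ticker/price
-- endpoint; the string price is cast to FLOAT.
SELECT
    binance.get('/api/v3/ticker/price', {'symbol': 'BTCUSDT'}):price::FLOAT
        AS btc_usdt_price;
```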
## API Documentation

- [Binance API Documentation](https://binance-docs.github.io/apidocs/spot/en/)
macros/marketplace/bitquery/README.md (new file, 45 lines)

@@ -0,0 +1,45 @@
# Bitquery API Integration

Bitquery provides GraphQL APIs for blockchain data across multiple networks including Bitcoin, Ethereum, Binance Smart Chain, and many others.

## Setup

1. Get your Bitquery API key from the [Bitquery IDE](https://ide.bitquery.io/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/BITQUERY`

3. Deploy the Bitquery marketplace functions:

```bash
dbt run --models bitquery__ bitquery_utils__bitquery_utils
```

## Functions

### `bitquery.get(path, query_args)`
Make GET requests to Bitquery API endpoints.

### `bitquery.post(path, body)`
Make POST requests to Bitquery API endpoints for GraphQL queries.

## Examples

```sql
-- Get Ethereum DEX trades
SELECT bitquery.post('/graphql', {
    'query': 'query { ethereum { dexTrades(date: {since: "2023-01-01"}) { count } } }'
});

-- Get Bitcoin transactions
SELECT bitquery.post('/graphql', {
    'query': 'query { bitcoin { transactions(date: {since: "2023-01-01"}) { count } } }'
});

-- Get token transfers on BSC
SELECT bitquery.post('/graphql', {
    'query': 'query { ethereum(network: bsc) { transfers(date: {since: "2023-01-01"}) { count } } }'
});
```
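GraphQL variables can be passed alongside the query string using the standard `variables` key; a sketch, with the variable type name shown for illustration only (confirm it against the schema in the Bitquery IDE):

```sql
-- 'variables' is standard GraphQL-over-HTTP; the ISO8601Date type name
-- here is illustrative and should be verified against Bitquery's schema.
SELECT bitquery.post('/graphql', {
    'query': 'query($since: ISO8601Date) { ethereum { dexTrades(date: {since: $since}) { count } } }',
    'variables': {'since': '2023-01-01'}
});
```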
## API Documentation

- [Bitquery API Documentation](https://docs.bitquery.io/)
macros/marketplace/blockpour/README.md (new file, 39 lines)

@@ -0,0 +1,39 @@
# Blockpour API Integration

Blockpour provides blockchain infrastructure and data services with high-performance APIs for accessing on-chain data.

## Setup

1. Get your Blockpour API key from the [Blockpour Dashboard](https://blockpour.com/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/BLOCKPOUR`

3. Deploy the Blockpour marketplace functions:

```bash
dbt run --models blockpour__ blockpour_utils__blockpour_utils
```

## Functions

### `blockpour.get(path, query_args)`
Make GET requests to Blockpour API endpoints.

### `blockpour.post(path, body)`
Make POST requests to Blockpour API endpoints.

## Examples

```sql
-- Get latest block information
SELECT blockpour.get('/api/v1/blocks/latest', {});

-- Get transaction details
SELECT blockpour.get('/api/v1/transactions/0x...', {});

-- Get token balances for an address
SELECT blockpour.get('/api/v1/addresses/0x.../tokens', {});
```

## API Documentation

- [Blockpour API Documentation](https://docs.blockpour.com/)
macros/marketplace/chainbase/README.md (new file, 39 lines)

@@ -0,0 +1,39 @@
# Chainbase API Integration

Chainbase provides comprehensive blockchain data infrastructure with APIs for accessing multi-chain data, NFTs, and DeFi protocols.

## Setup

1. Get your Chainbase API key from the [Chainbase Console](https://console.chainbase.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/CHAINBASE`

3. Deploy the Chainbase marketplace functions:

```bash
dbt run --models chainbase__ chainbase_utils__chainbase_utils
```

## Functions

### `chainbase.get(path, query_args)`
Make GET requests to Chainbase API endpoints.

### `chainbase.post(path, body)`
Make POST requests to Chainbase API endpoints.

## Examples

```sql
-- Get token metadata
SELECT chainbase.get('/v1/token/metadata', {'chain_id': 1, 'contract_address': '0x...'});

-- Get NFT collections
SELECT chainbase.get('/v1/nft/collections', {'chain_id': 1, 'page': 1, 'limit': 20});

-- Get account token balances
SELECT chainbase.get('/v1/account/tokens', {'chain_id': 1, 'address': '0x...', 'limit': 20});
```

## API Documentation

- [Chainbase API Documentation](https://docs.chainbase.com/)
macros/marketplace/chainstack/README.md (new file, 54 lines)

@@ -0,0 +1,54 @@
# Chainstack API Integration

Chainstack provides managed blockchain infrastructure with high-performance nodes and APIs for multiple blockchain networks.

## Setup

1. Get your Chainstack API key from the [Chainstack Console](https://console.chainstack.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/CHAINSTACK`

3. Deploy the Chainstack marketplace functions:

```bash
dbt run --models chainstack__ chainstack_utils__chainstack_utils
```

## Functions

### `chainstack.get(path, query_args)`
Make GET requests to Chainstack API endpoints.

### `chainstack.post(path, body)`
Make POST requests to Chainstack API endpoints.

## Examples

```sql
-- Get latest block number
SELECT chainstack.post('/rpc', {
    'jsonrpc': '2.0',
    'method': 'eth_blockNumber',
    'params': [],
    'id': 1
});

-- Get account balance
SELECT chainstack.post('/rpc', {
    'jsonrpc': '2.0',
    'method': 'eth_getBalance',
    'params': ['0x...', 'latest'],
    'id': 1
});

-- Get transaction receipt
SELECT chainstack.post('/rpc', {
    'jsonrpc': '2.0',
    'method': 'eth_getTransactionReceipt',
    'params': ['0x...'],
    'id': 1
});
```
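Each call returns the full JSON-RPC envelope; a sketch pulling out just the `result` field, whose name is fixed by the JSON-RPC 2.0 spec:

```sql
-- Extract the result field from the standard JSON-RPC 2.0 envelope.
WITH rpc AS (
    SELECT chainstack.post('/rpc', {
        'jsonrpc': '2.0',
        'method': 'eth_blockNumber',
        'params': [],
        'id': 1
    }) AS response
)
SELECT response:result::STRING AS latest_block_hex
FROM rpc;
```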
## API Documentation

- [Chainstack API Documentation](https://docs.chainstack.com/)
macros/marketplace/claude/README.md (new file, 179 lines)

@@ -0,0 +1,179 @@
# Claude API Integration

Anthropic's Claude AI integration for sophisticated text analysis, content generation, and reasoning tasks. This integration provides access to Claude's advanced language models through Snowflake UDFs.

## Available Models

- **Claude 3.5 Sonnet**: Latest and most capable model for complex tasks
- **Claude 3 Opus**: Powerful model for demanding use cases
- **Claude 3 Sonnet**: Balanced performance and speed
- **Claude 3 Haiku**: Fast and efficient for simple tasks

Check [Anthropic's documentation](https://docs.anthropic.com/claude/docs/models-overview) for the latest available models.

## Setup

1. Get your Claude API key from the [Anthropic Console](https://console.anthropic.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/CLAUDE`

3. Deploy the Claude marketplace functions:

```bash
dbt run --models claude__ claude_utils__claude_utils
```

## Functions

### `claude_utils.post(path, body)`
Make POST requests to Claude API endpoints.

### `claude_utils.get(path)`
Make GET requests to Claude API endpoints.

### `claude_utils.delete_method(path)`
Make DELETE requests to Claude API endpoints.

### `claude.chat_completions(messages[, model, max_tokens, temperature])`
Send messages to Claude for chat completion.

### `claude.extract_response_text(claude_response)`
Extract text content from Claude API responses.

## Examples

### Basic Chat
```sql
-- Simple conversation with Claude
SELECT claude.chat_completions([
    {'role': 'user', 'content': 'Explain quantum computing in simple terms'}
]);
```

### Chat with System Prompt
```sql
-- Chat with system message and conversation history
SELECT claude.chat_completions([
    {'role': 'system', 'content': 'You are a helpful data analyst.'},
    {'role': 'user', 'content': 'How do I optimize this SQL query?'},
    {'role': 'assistant', 'content': 'I can help you optimize your SQL query...'},
    {'role': 'user', 'content': 'SELECT * FROM large_table WHERE date > "2023-01-01"'}
]);
```

### Text Analysis
```sql
-- Analyze text sentiment and themes
SELECT claude.chat_completions([
    {'role': 'user', 'content': 'Analyze the sentiment and key themes in this customer feedback: "The product is okay but customer service was terrible. Took forever to get help."'}
]);
```

### Code Generation
```sql
-- Generate Python code
SELECT claude.chat_completions([
    {'role': 'user', 'content': 'Write a Python function to calculate the moving average of a list of numbers'}
]);
```

### Extract Response Text
```sql
-- Get just the text content from Claude's response
WITH claude_response AS (
    SELECT claude.chat_completions([
        {'role': 'user', 'content': 'What is machine learning?'}
    ]) as response
)
SELECT claude.extract_response_text(response) as answer
FROM claude_response;
```

### Batch Text Processing
```sql
-- Process multiple texts
WITH texts AS (
    SELECT * FROM VALUES
        ('Great product, highly recommend!'),
        ('Terrible experience, would not buy again'),
        ('Average quality, nothing special')
    AS t(feedback)
)
SELECT
    feedback,
    claude.extract_response_text(
        claude.chat_completions([
            {'role': 'user', 'content': CONCAT('Analyze sentiment (positive/negative/neutral): ', feedback)}
        ])
    ) as sentiment
FROM texts;
```

### Different Models
```sql
-- Use a specific Claude model
SELECT claude.chat_completions(
    [{'role': 'user', 'content': 'Write a complex analysis of market trends'}],
    'claude-3-opus-20240229', -- Use Opus for complex reasoning
    2000, -- max_tokens
    0.3   -- temperature
);
```

## Integration with GitHub Actions

This Claude integration is used by the GitHub Actions failure analysis system:

```sql
-- Analyze GitHub Actions failures with Claude
SELECT claude.extract_response_text(
    claude.chat_completions([
        {'role': 'user', 'content': CONCAT(
            'Analyze this CI/CD failure and provide root cause analysis: ',
            error_logs
        )}
    ])
) as ai_analysis
FROM github_failures;
```

## Error Handling

Check for errors in Claude responses:

```sql
WITH response AS (
    SELECT claude.chat_completions([
        {'role': 'user', 'content': 'Hello Claude'}
    ]) as result
)
SELECT
    CASE
        WHEN result:error IS NOT NULL THEN result:error:message::STRING
        ELSE claude.extract_response_text(result)
    END as final_response
FROM response;
```

## Best Practices

1. **Use appropriate models**: Haiku for simple tasks, Opus for complex reasoning (see the sketch after this list)
2. **Set token limits**: Control costs with reasonable `max_tokens` values
3. **Temperature control**: Lower values (0.1-0.3) for factual tasks, higher (0.7-1.0) for creative tasks
4. **Context management**: Include relevant conversation history for better responses
5. **Error handling**: Always check for API errors in responses
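A minimal sketch tying points 1-3 together: a fast model, a tight token budget, and a low temperature for a simple factual task. The model ID is one of Anthropic's published identifiers; verify it is still current before use.

```sql
-- Haiku for a simple extraction task, low max_tokens to control cost,
-- low temperature for factual output. Arguments follow the signature
-- documented above: (messages, model, max_tokens, temperature).
SELECT claude.chat_completions(
    [{'role': 'user', 'content': 'List the three largest line items in this budget: ...'}],
    'claude-3-haiku-20240307',
    500,
    0.2
);
```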
## Rate Limiting

Claude API has usage limits based on your plan. The functions automatically handle rate limiting through Livequery's retry mechanisms.

## Security

- API keys are securely stored in Snowflake secrets
- All communication uses HTTPS encryption
- No sensitive data is logged or cached

## API Documentation

- [Claude API Reference](https://docs.anthropic.com/claude/reference/getting-started-with-the-api)
- [Model Comparison](https://docs.anthropic.com/claude/docs/models-overview)
- [Usage Guidelines](https://docs.anthropic.com/claude/docs/use-case-guides)
macros/marketplace/cmc/README.md (new file, 36 lines)

@@ -0,0 +1,36 @@
# CoinMarketCap API Integration

CoinMarketCap is a leading cryptocurrency market data platform providing real-time and historical cryptocurrency prices, market capitalizations, and trading volumes.

## Setup

1. Get your CoinMarketCap API key from the [CoinMarketCap Pro API](https://pro.coinmarketcap.com/account)

2. Store the API key in Snowflake secrets under `_FSC_SYS/CMC`

3. Deploy the CoinMarketCap marketplace functions:

```bash
dbt run --models cmc__ cmc_utils__cmc_utils
```

## Functions

### `cmc.get(path, query_args)`
Make GET requests to CoinMarketCap API endpoints.

## Examples

```sql
-- Get latest cryptocurrency listings
SELECT cmc.get('/v1/cryptocurrency/listings/latest', {'limit': 100});

-- Get specific cryptocurrency quotes
SELECT cmc.get('/v2/cryptocurrency/quotes/latest', {'symbol': 'BTC,ETH,ADA'});

-- Get cryptocurrency metadata
SELECT cmc.get('/v2/cryptocurrency/info', {'symbol': 'BTC'});
```

## API Documentation

- [CoinMarketCap API Documentation](https://coinmarketcap.com/api/documentation/v1/)
macros/marketplace/coingecko/README.md (new file, 76 lines)

@@ -0,0 +1,76 @@
# CoinGecko API Integration

Comprehensive cryptocurrency market data integration using CoinGecko's Pro API for prices, market data, and trading information.

## Setup

1. Get your CoinGecko Pro API key from [CoinGecko Pro](https://pro.coingecko.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/COINGECKO`

3. Deploy the CoinGecko marketplace functions:

```bash
dbt run --models coingecko__ coingecko_utils__coingecko_utils
```

## Functions

### `coingecko.get(path, query_args)`
Make GET requests to CoinGecko Pro API endpoints.

### `coingecko.post(path, body)`
Make POST requests to CoinGecko Pro API endpoints.

## Examples

### Price Data
```sql
-- Get current price for Bitcoin
SELECT coingecko.get('/api/v3/simple/price', {
    'ids': 'bitcoin',
    'vs_currencies': 'usd,eth',
    'include_24hr_change': 'true'
});

-- Get historical prices
SELECT coingecko.get('/api/v3/coins/bitcoin/history', {
    'date': '30-12-2023'
});
```
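The `simple/price` response keys by coin ID and then by currency, so scalar values can be pulled straight out of the VARIANT; a sketch assuming that documented shape:

```sql
-- Response shape is {'bitcoin': {'usd': <price>, ...}} per CoinGecko's docs.
WITH price AS (
    SELECT coingecko.get('/api/v3/simple/price', {
        'ids': 'bitcoin',
        'vs_currencies': 'usd'
    }) AS response
)
SELECT response:bitcoin:usd::FLOAT AS btc_usd
FROM price;
```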
### Market Data
```sql
-- Get top cryptocurrencies by market cap
SELECT coingecko.get('/api/v3/coins/markets', {
    'vs_currency': 'usd',
    'order': 'market_cap_desc',
    'per_page': 100,
    'page': 1
});

-- Get global cryptocurrency statistics
SELECT coingecko.get('/api/v3/global', {});
```

### Token Information
```sql
-- Get detailed coin information
SELECT coingecko.get('/api/v3/coins/ethereum', {
    'localization': 'false',
    'tickers': 'false',
    'market_data': 'true',
    'community_data': 'true'
});
```

## Rate Limiting

CoinGecko Pro API limits:
- **Basic**: 10,000 calls/month
- **Premium**: 50,000 calls/month
- **Enterprise**: Custom limits

## API Documentation

- [CoinGecko Pro API Documentation](https://apiguide.coingecko.com/getting-started/introduction)
- [API Endpoints Reference](https://docs.coingecko.com/reference/introduction)
macros/marketplace/covalent/README.md (new file, 36 lines)

@@ -0,0 +1,36 @@
# Covalent API Integration

Covalent provides a unified API to access rich blockchain data across multiple networks, offering historical and real-time data for wallets, transactions, and DeFi protocols.

## Setup

1. Get your Covalent API key from the [Covalent Dashboard](https://www.covalenthq.com/platform/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/COVALENT`

3. Deploy the Covalent marketplace functions:

```bash
dbt run --models covalent__ covalent_utils__covalent_utils
```

## Functions

### `covalent.get(path, query_args)`
Make GET requests to Covalent API endpoints.

## Examples

```sql
-- Get token balances for an address
SELECT covalent.get('/v1/1/address/0x.../balances_v2/', {});

-- Get transaction history for an address
SELECT covalent.get('/v1/1/address/0x.../transactions_v2/', {'page-size': 100});

-- Get NFTs owned by an address
SELECT covalent.get('/v1/1/address/0x.../balances_nft/', {});
```

## API Documentation

- [Covalent API Documentation](https://www.covalenthq.com/docs/api/)
macros/marketplace/credmark/README.md (new file, 39 lines)

@@ -0,0 +1,39 @@
# Credmark API Integration

Credmark provides DeFi risk modeling and analytics APIs with comprehensive data on lending protocols, token prices, and risk metrics.

## Setup

1. Get your Credmark API key from the [Credmark Portal](https://gateway.credmark.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/CREDMARK`

3. Deploy the Credmark marketplace functions:

```bash
dbt run --models credmark__ credmark_utils__credmark_utils
```

## Functions

### `credmark.get(path, query_args)`
Make GET requests to Credmark API endpoints.

### `credmark.post(path, body)`
Make POST requests to Credmark API endpoints.

## Examples

```sql
-- Get token price
SELECT credmark.get('/v1/model/token.price', {'token_address': '0x...', 'block_number': 'latest'});

-- Get portfolio risk metrics
SELECT credmark.post('/v1/model/finance.var-portfolio', {'addresses': ['0x...'], 'window': 30});

-- Get lending pool information
SELECT credmark.get('/v1/model/compound-v2.pool-info', {'token_address': '0x...'});
```

## API Documentation

- [Credmark API Documentation](https://docs.credmark.com/)
macros/marketplace/dapplooker/README.md (new file, 39 lines)

@@ -0,0 +1,39 @@
# DappLooker API Integration

DappLooker provides a blockchain analytics and data visualization platform with APIs for accessing DeFi, NFT, and on-chain metrics across multiple networks.

## Setup

1. Get your DappLooker API key from the [DappLooker Dashboard](https://dapplooker.com/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/DAPPLOOKER`

3. Deploy the DappLooker marketplace functions:

```bash
dbt run --models dapplooker__ dapplooker_utils__dapplooker_utils
```

## Functions

### `dapplooker.get(path, query_args)`
Make GET requests to DappLooker API endpoints.

### `dapplooker.post(path, body)`
Make POST requests to DappLooker API endpoints.

## Examples

```sql
-- Get DeFi protocol metrics
SELECT dapplooker.get('/api/v1/defi/protocols', {'network': 'ethereum'});

-- Get NFT collection statistics
SELECT dapplooker.get('/api/v1/nft/collections/stats', {'collection': '0x...'});

-- Get wallet analytics
SELECT dapplooker.get('/api/v1/wallet/analytics', {'address': '0x...', 'network': 'ethereum'});
```

## API Documentation

- [DappLooker API Documentation](https://docs.dapplooker.com/)
macros/marketplace/dappradar/README.md (new file, 36 lines)

@@ -0,0 +1,36 @@
# DappRadar API Integration

DappRadar is a leading DApp analytics platform providing comprehensive data on decentralized applications, DeFi protocols, NFT collections, and blockchain games.

## Setup

1. Get your DappRadar API key from the [DappRadar API Dashboard](https://dappradar.com/api)

2. Store the API key in Snowflake secrets under `_FSC_SYS/DAPPRADAR`

3. Deploy the DappRadar marketplace functions:

```bash
dbt run --models dappradar__ dappradar_utils__dappradar_utils
```

## Functions

### `dappradar.get(path, query_args)`
Make GET requests to DappRadar API endpoints.

## Examples

```sql
-- Get top DApps by category
SELECT dappradar.get('/dapps', {'chain': 'ethereum', 'category': 'defi', 'limit': 50});

-- Get DApp details
SELECT dappradar.get('/dapps/1', {});

-- Get NFT collection rankings
SELECT dappradar.get('/nft/collections', {'chain': 'ethereum', 'range': '24h', 'limit': 100});
```

## API Documentation

- [DappRadar API Documentation](https://docs.dappradar.com/)
macros/marketplace/deepnftvalue/README.md (new file, 39 lines)

@@ -0,0 +1,39 @@
# DeepNFTValue API Integration

DeepNFTValue provides AI-powered NFT valuation and analytics services, offering price predictions and market insights for NFT collections.

## Setup

1. Get your DeepNFTValue API key from the [DeepNFTValue Dashboard](https://deepnftvalue.com/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/DEEPNFTVALUE`

3. Deploy the DeepNFTValue marketplace functions:

```bash
dbt run --models deepnftvalue__ deepnftvalue_utils__deepnftvalue_utils
```

## Functions

### `deepnftvalue.get(path, query_args)`
Make GET requests to DeepNFTValue API endpoints.

### `deepnftvalue.post(path, body)`
Make POST requests to DeepNFTValue API endpoints.

## Examples

```sql
-- Get NFT valuation
SELECT deepnftvalue.get('/api/v1/valuation', {'contract_address': '0x...', 'token_id': '1234'});

-- Get collection analytics
SELECT deepnftvalue.get('/api/v1/collection/analytics', {'contract_address': '0x...'});

-- Get price predictions
SELECT deepnftvalue.post('/api/v1/predict', {'contract_address': '0x...', 'token_ids': [1, 2, 3]});
```

## API Documentation

- [DeepNFTValue API Documentation](https://docs.deepnftvalue.com/)
macros/marketplace/defillama/README.md (new file, 90 lines)

@@ -0,0 +1,90 @@
# DefiLlama API Integration

DeFi analytics and TVL (Total Value Locked) data integration using DefiLlama's comprehensive DeFi protocol database.

## Setup

1. Most DefiLlama endpoints are free and don't require an API key

2. For premium endpoints, get your API key from [DefiLlama](https://defillama.com/docs/api)

3. Store the API key in Snowflake secrets under `_FSC_SYS/DEFILLAMA` (if using premium features)

4. Deploy the DefiLlama marketplace functions:

```bash
dbt run --models defillama__ defillama_utils__defillama_utils
```

## Functions

### `defillama.get(path, query_args)`
Make GET requests to DefiLlama API endpoints.

## Examples

### Protocol TVL Data
```sql
-- Get current TVL for all protocols
SELECT defillama.get('/protocols', {});

-- Get specific protocol information
SELECT defillama.get('/protocol/uniswap', {});

-- Get historical TVL for a protocol
SELECT defillama.get('/protocol/aave', {});
```
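Since `/protocols` returns one array covering every protocol, ranking queries are a single flatten away; a sketch assuming the documented `name` and `tvl` fields:

```sql
-- Rank protocols by TVL; the name and tvl field names assume
-- DefiLlama's documented /protocols response.
WITH protocols AS (
    SELECT defillama.get('/protocols', {}) AS response
)
SELECT
    p.value:name::STRING AS protocol,
    p.value:tvl::FLOAT   AS tvl_usd
FROM protocols,
LATERAL FLATTEN(input => protocols.response) p
ORDER BY tvl_usd DESC
LIMIT 10;
```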
### Chain TVL Data
```sql
-- Get TVL for all chains
SELECT defillama.get('/chains', {});

-- Get historical TVL for Ethereum
SELECT defillama.get('/historicalChainTvl/Ethereum', {});
```

### Yield Farming Data
```sql
-- Get current yields
SELECT defillama.get('/yields', {});

-- Get yields for a specific protocol
SELECT defillama.get('/yields/project/aave', {});
```

### Token Pricing
```sql
-- Get current token prices
SELECT defillama.get('/prices/current/ethereum:0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD', {});

-- Get historical token prices
SELECT defillama.get('/prices/historical/1640995200/ethereum:0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD', {});
```

### Stablecoin Data
```sql
-- Get stablecoin market caps
SELECT defillama.get('/stablecoins', {});

-- Get specific stablecoin information
SELECT defillama.get('/stablecoin/1', {}); -- USDT
```

### Bridge Data
```sql
-- Get bridge volumes
SELECT defillama.get('/bridges', {});

-- Get specific bridge information
SELECT defillama.get('/bridge/1', {});
```

## Rate Limiting

The DefiLlama API is rate-limited to prevent abuse; most endpoints are free to use.

## API Documentation

- [DefiLlama API Documentation](https://defillama.com/docs/api)
- [TVL API](https://defillama.com/docs/api#operations-tag-TVL)
- [Yields API](https://defillama.com/docs/api#operations-tag-Yields)
macros/marketplace/dune/README.md (new file, 74 lines)

@@ -0,0 +1,74 @@
# Dune Analytics API Integration

Access Dune Analytics queries and results directly from Snowflake for blockchain data analysis and visualization.

## Setup

1. Get your Dune API key from [Dune Analytics](https://dune.com/settings/api)

2. Store the API key in Snowflake secrets under `_FSC_SYS/DUNE`

3. Deploy the Dune marketplace functions:

```bash
dbt run --models dune__ dune_utils__dune_utils
```

## Functions

### `dune.get(path, query_args)`
Make GET requests to Dune API endpoints.

### `dune.post(path, body)`
Make POST requests to Dune API endpoints.

## Examples

### Execute Queries
```sql
-- Execute a Dune query
SELECT dune.post('/api/v1/query/1234567/execute', {
    'query_parameters': {
        'token_address': '0xA0b86a33E6417e8EdcfCfdD8fb59a3A5b3dB8BFD'
    }
});
```

### Get Query Results
```sql
-- Get results from executed query
SELECT dune.get('/api/v1/execution/01234567-89ab-cdef-0123-456789abcdef/results', {});

-- Get latest results for a query
SELECT dune.get('/api/v1/query/1234567/results', {});
```

### Query Status
```sql
-- Check execution status
SELECT dune.get('/api/v1/execution/01234567-89ab-cdef-0123-456789abcdef/status', {});
```
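Execution and status checks can be chained in one statement by reading the `execution_id` that the execute call returns (field name per Dune's API docs); a sketch:

```sql
-- Chain execute -> status; execution_id is the field Dune's execute
-- endpoint returns, and the query ID here is a placeholder.
WITH execution AS (
    SELECT dune.post('/api/v1/query/1234567/execute', {}) AS response
)
SELECT dune.get(
    CONCAT('/api/v1/execution/', response:execution_id::STRING, '/status'),
    {}
) AS status
FROM execution;
```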
### Parameterized Queries
```sql
-- Execute query with parameters
SELECT dune.post('/api/v1/query/1234567/execute', {
    'query_parameters': {
        'start_date': '2023-01-01',
        'end_date': '2023-12-31',
        'min_amount': 1000
    }
});
```

## Rate Limiting

Dune API rate limits vary by plan:
- **Free**: 20 executions per day
- **Plus**: 1,000 executions per day
- **Premium**: 10,000 executions per day

## API Documentation

- [Dune API Documentation](https://dune.com/docs/api/)
- [Authentication](https://dune.com/docs/api/api-reference/authentication/)
- [Query Execution](https://dune.com/docs/api/api-reference/execute-queries/)
macros/marketplace/espn/README.md (new file, 36 lines)

@@ -0,0 +1,36 @@
# ESPN API Integration

ESPN provides comprehensive sports data including scores, schedules, player statistics, and news across multiple sports leagues.

## Setup

1. Get your ESPN API key from the [ESPN Developer Portal](https://developer.espn.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/ESPN`

3. Deploy the ESPN marketplace functions:

```bash
dbt run --models espn__ espn_utils__espn_utils
```

## Functions

### `espn.get(path, query_args)`
Make GET requests to ESPN API endpoints.

## Examples

```sql
-- Get NFL scores
SELECT espn.get('/v1/sports/football/nfl/scoreboard', {});

-- Get NBA team roster
SELECT espn.get('/v1/sports/basketball/nba/teams/1/roster', {});

-- Get MLB standings
SELECT espn.get('/v1/sports/baseball/mlb/standings', {});
```

## API Documentation

- [ESPN API Documentation](https://site.api.espn.com/apis/site/v2/sports/)
macros/marketplace/footprint/README.md (new file, 39 lines)

@@ -0,0 +1,39 @@
# Footprint Analytics API Integration

Footprint Analytics provides comprehensive blockchain data analytics with APIs for accessing DeFi, NFT, GameFi, and cross-chain data insights.

## Setup

1. Get your Footprint API key from the [Footprint Analytics Dashboard](https://www.footprint.network/dashboard)

2. Store the API key in Snowflake secrets under `_FSC_SYS/FOOTPRINT`

3. Deploy the Footprint marketplace functions:

```bash
dbt run --models footprint__ footprint_utils__footprint_utils
```

## Functions

### `footprint.get(path, query_args)`
Make GET requests to Footprint Analytics API endpoints.

### `footprint.post(path, body)`
Make POST requests to Footprint Analytics API endpoints.

## Examples

```sql
-- Get DeFi protocol TVL data
SELECT footprint.get('/api/v1/defi/protocol/tvl', {'protocol': 'uniswap', 'chain': 'ethereum'});

-- Get NFT market trends
SELECT footprint.get('/api/v1/nft/market/overview', {'timeframe': '7d'});

-- Get GameFi protocol statistics
SELECT footprint.get('/api/v1/gamefi/protocols', {'chain': 'polygon', 'limit': 20});
```

## API Documentation

- [Footprint Analytics API Documentation](https://docs.footprint.network/)
macros/marketplace/fred/README.md (new file, 36 lines)

@@ -0,0 +1,36 @@
# FRED API Integration

FRED (Federal Reserve Economic Data) provides access to economic data from the Federal Reserve Bank of St. Louis, including GDP, inflation, employment, and financial market data.

## Setup

1. Get your FRED API key from [FRED API Registration](https://fred.stlouisfed.org/docs/api/api_key.html)

2. Store the API key in Snowflake secrets under `_FSC_SYS/FRED`

3. Deploy the FRED marketplace functions:

```bash
dbt run --models fred__ fred_utils__fred_utils
```

## Functions

### `fred.get(path, query_args)`
Make GET requests to FRED API endpoints.

## Examples

```sql
-- Get GDP data
SELECT fred.get('/series/observations', {'series_id': 'GDP', 'api_key': 'your_key'});

-- Get unemployment rate
SELECT fred.get('/series/observations', {'series_id': 'UNRATE', 'api_key': 'your_key'});

-- Get inflation rate (CPI)
SELECT fred.get('/series/observations', {'series_id': 'CPIAUCSL', 'api_key': 'your_key'});
```

## API Documentation

- [FRED API Documentation](https://fred.stlouisfed.org/docs/api/fred/)
macros/marketplace/github/README.md (new file, 668 lines)

@@ -0,0 +1,668 @@
# GitHub Actions Integration for Livequery

A comprehensive GitHub Actions integration that provides both scalar functions (UDFs) and table functions (UDTFs) for interacting with GitHub's REST API. Monitor workflows, retrieve logs, trigger dispatches, and analyze CI/CD data directly from your data warehouse.

## Prerequisites & Setup

### Authentication Setup

The integration uses GitHub Personal Access Tokens (PATs) or GitHub App tokens for authentication.

#### Option 1: Personal Access Token (Recommended for Development)

1. Go to [GitHub Settings → Developer settings → Personal access tokens](https://github.com/settings/tokens)
2. Click "Generate new token (classic)"
3. Select required scopes:
   - `repo` - Full control of private repositories
   - `actions:read` - Read access to Actions (minimum required)
   - `actions:write` - Write access to Actions (for triggering workflows)
   - `workflow` - Update GitHub Action workflows (for enable/disable)
4. Copy the generated token
5. Store it securely in your secrets management system

#### Option 2: GitHub App (Recommended for Production)

1. Create a GitHub App in your organization settings
2. Grant required permissions:
   - **Actions**: Read & Write
   - **Contents**: Read
   - **Metadata**: Read
3. Install the app on the repositories you want to access
4. Use the app's installation token

### Environment Setup

The integration automatically handles authentication through Livequery's secrets management:

- **System users**: Uses the `_FSC_SYS/GITHUB` secret path
- **Regular users**: Uses the `vault/github/api` secret path
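Once the secret is in place, connectivity can be verified with the integration's own smoke-test function (documented under Function Reference below):

```sql
-- Should return GitHub's Octocat ASCII-art response if auth is working.
SELECT github_utils.octocat();
```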
## Quick Start

### 1. List Repository Workflows

```sql
-- Get all workflows for a repository
SELECT * FROM TABLE(
    github_actions.tf_workflows('your-org', 'your-repo')
);

-- Or as a JSON object
SELECT github_actions.workflows('your-org', 'your-repo') as workflows_data;
```

### 2. Monitor Workflow Runs

```sql
-- Get recent workflow runs with status filtering
SELECT * FROM TABLE(
    github_actions.tf_runs('your-org', 'your-repo', {'status': 'completed', 'per_page': 10})
);

-- Get runs for a specific workflow
SELECT * FROM TABLE(
    github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml')
);
```

### 3. Analyze Failed Jobs

```sql
-- Get failed jobs with complete logs for troubleshooting
SELECT
    job_name,
    job_conclusion,
    job_url,
    logs
FROM TABLE(
    github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', '12345678')
);
```

### 4. Trigger Workflow Dispatch

```sql
-- Trigger a workflow manually
SELECT github_actions.workflow_dispatches(
    'your-org',
    'your-repo',
    'deploy.yml',
    {
        'ref': 'main',
        'inputs': {
            'environment': 'staging',
            'debug': 'true'
        }
    }
) as dispatch_result;
```

## Function Reference

### Utility Functions (`github_utils` schema)

#### `github_utils.octocat()`
Test GitHub API connectivity and authentication.
```sql
SELECT github_utils.octocat();
-- Returns: GitHub API response with Octocat ASCII art
```

#### `github_utils.headers()`
Get properly formatted GitHub API headers.
```sql
SELECT github_utils.headers();
-- Returns: '{"Authorization": "Bearer {TOKEN}", ...}'
```

#### `github_utils.get(route, query)`
Make GET requests to the GitHub API.
```sql
SELECT github_utils.get('repos/your-org/your-repo', {'per_page': 10});
```

#### `github_utils.post(route, data)`
Make POST requests to the GitHub API.
```sql
SELECT github_utils.post('repos/your-org/your-repo/issues', {
    'title': 'New Issue',
    'body': 'Issue description'
});
```

#### `github_utils.put(route, data)`
Make PUT requests to the GitHub API.
```sql
SELECT github_utils.put('repos/your-org/your-repo/actions/workflows/ci.yml/enable', {});
```

### Workflow Functions (`github_actions` schema)

#### Scalar Functions (Return JSON Objects)

##### `github_actions.workflows(owner, repo[, query])`
List repository workflows.
```sql
-- Basic usage
SELECT github_actions.workflows('FlipsideCrypto', 'admin-models');

-- With query parameters
SELECT github_actions.workflows('FlipsideCrypto', 'admin-models', {'per_page': 50});
```

##### `github_actions.runs(owner, repo[, query])`
List workflow runs for a repository.
```sql
-- Get recent runs
SELECT github_actions.runs('your-org', 'your-repo');

-- Filter by status and branch
SELECT github_actions.runs('your-org', 'your-repo', {
    'status': 'completed',
    'branch': 'main',
    'per_page': 20
});
```

##### `github_actions.workflow_runs(owner, repo, workflow_id[, query])`
List runs for a specific workflow.
```sql
-- Get runs for the CI workflow
SELECT github_actions.workflow_runs('your-org', 'your-repo', 'ci.yml');

-- With filtering
SELECT github_actions.workflow_runs('your-org', 'your-repo', 'ci.yml', {
    'status': 'failure',
    'per_page': 10
});
```

##### `github_actions.workflow_dispatches(owner, repo, workflow_id[, body])`
Trigger a workflow dispatch event.
```sql
-- Simple dispatch (uses main branch)
SELECT github_actions.workflow_dispatches('your-org', 'your-repo', 'deploy.yml');

-- With custom inputs
SELECT github_actions.workflow_dispatches('your-org', 'your-repo', 'deploy.yml', {
    'ref': 'develop',
    'inputs': {
        'environment': 'staging',
        'version': '1.2.3'
    }
});
```

##### `github_actions.workflow_enable(owner, repo, workflow_id)`
Enable a workflow.
```sql
SELECT github_actions.workflow_enable('your-org', 'your-repo', 'ci.yml');
```

##### `github_actions.workflow_disable(owner, repo, workflow_id)`
Disable a workflow.
```sql
SELECT github_actions.workflow_disable('your-org', 'your-repo', 'ci.yml');
```

##### `github_actions.workflow_run_logs(owner, repo, run_id)`
Get the download URL for workflow run logs.
```sql
SELECT github_actions.workflow_run_logs('your-org', 'your-repo', '12345678');
```

##### `github_actions.job_logs(owner, repo, job_id)`
Get plain-text logs for a specific job.
```sql
SELECT github_actions.job_logs('your-org', 'your-repo', '87654321');
```

##### `github_actions.workflow_run_jobs(owner, repo, run_id[, query])`
List jobs for a workflow run.
```sql
-- Get all jobs
SELECT github_actions.workflow_run_jobs('your-org', 'your-repo', '12345678');

-- Filter to the latest attempt only
SELECT github_actions.workflow_run_jobs('your-org', 'your-repo', '12345678', {
    'filter': 'latest'
});
```

#### Table Functions (Return Structured Data)

##### `github_actions.tf_workflows(owner, repo[, query])`
List workflows as structured table data.
```sql
SELECT
    id,
    name,
    path,
    state,
    created_at,
    updated_at,
    badge_url,
    html_url
FROM TABLE(github_actions.tf_workflows('your-org', 'your-repo'));
```

##### `github_actions.tf_runs(owner, repo[, query])`
List workflow runs as structured table data.
```sql
SELECT
    id,
    name,
    status,
    conclusion,
    head_branch,
    head_sha,
    run_number,
    event,
    created_at,
    updated_at,
    html_url
FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 20}));
```

##### `github_actions.tf_workflow_runs(owner, repo, workflow_id[, query])`
List runs for a specific workflow as structured table data.
```sql
SELECT
    id,
    name,
    status,
    conclusion,
    run_number,
    head_branch,
    created_at,
    html_url
FROM TABLE(github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml'));
```

##### `github_actions.tf_workflow_run_jobs(owner, repo, run_id[, query])`
List jobs for a workflow run as structured table data.
```sql
SELECT
    id,
    name,
    status,
    conclusion,
    started_at,
    completed_at,
    runner_name,
    runner_group_name,
    html_url
FROM TABLE(github_actions.tf_workflow_run_jobs('your-org', 'your-repo', '12345678'));
```

##### `github_actions.tf_failed_jobs_with_logs(owner, repo, run_id)`
Get failed jobs with their complete logs for analysis.
```sql
SELECT
    job_id,
    job_name,
    job_status,
    job_conclusion,
    job_url,
    failed_steps,
    logs
FROM TABLE(github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', '12345678'));
```

## Advanced Usage Examples

### CI/CD Monitoring Dashboard

```sql
-- Recent workflow runs with failure rate
WITH recent_runs AS (
    SELECT
        name,
        status,
        conclusion,
        head_branch,
        created_at,
        html_url
    FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 100}))
    WHERE created_at >= CURRENT_DATE - 7
)
SELECT
    name,
    COUNT(*) as total_runs,
    COUNT(CASE WHEN conclusion = 'success' THEN 1 END) as successful_runs,
    COUNT(CASE WHEN conclusion = 'failure' THEN 1 END) as failed_runs,
    ROUND(COUNT(CASE WHEN conclusion = 'failure' THEN 1 END) * 100.0 / COUNT(*), 2) as failure_rate_pct
FROM recent_runs
GROUP BY name
ORDER BY failure_rate_pct DESC;
```

### Failed Job Analysis

#### Multi-Run Failure Analysis
```sql
-- Analyze failures across multiple runs
WITH failed_jobs AS (
    SELECT
        r.id as run_id,
        r.name as workflow_name,
        r.head_branch,
        r.created_at as run_created_at,
        j.job_name,
        j.job_conclusion,
        j.logs
    FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'status': 'completed'})) r
    CROSS JOIN TABLE(github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', r.id::TEXT)) j
    WHERE r.conclusion = 'failure'
        AND r.created_at >= CURRENT_DATE - 3
)
SELECT
    workflow_name,
    job_name,
    COUNT(*) as failure_count,
    ARRAY_AGG(DISTINCT head_branch) as affected_branches,
    ARRAY_SLICE(ARRAY_AGG(logs), 0, 3) as sample_logs
FROM failed_jobs
GROUP BY workflow_name, job_name
ORDER BY failure_count DESC;
```

#### Specific Job Log Analysis
```sql
-- Get detailed logs for a specific failed job
WITH specific_job AS (
    SELECT
        id as job_id,
        name as job_name,
        status,
        conclusion,
        started_at,
        completed_at,
        html_url,
        steps
    FROM TABLE(github_actions.tf_workflow_run_jobs('your-org', 'your-repo', '12345678'))
|
||||
WHERE name = 'Build and Test' -- Specify the job name you want to analyze
|
||||
AND conclusion = 'failure'
|
||||
)
|
||||
SELECT
|
||||
job_id,
|
||||
job_name,
|
||||
status,
|
||||
conclusion,
|
||||
started_at,
|
||||
completed_at,
|
||||
html_url,
|
||||
steps,
|
||||
github_actions.job_logs('your-org', 'your-repo', job_id::TEXT) as full_logs
|
||||
FROM specific_job;
|
||||
```
|
||||
|
||||
#### From Workflow ID to Failed Logs
|
||||
```sql
|
||||
-- Complete workflow: Workflow ID → Run ID → Failed Logs
|
||||
WITH latest_failed_run AS (
|
||||
-- Step 1: Get the most recent failed run for your workflow
|
||||
SELECT
|
||||
id as run_id,
|
||||
name as workflow_name,
|
||||
status,
|
||||
conclusion,
|
||||
head_branch,
|
||||
head_sha,
|
||||
created_at,
|
||||
html_url as run_url
|
||||
FROM TABLE(github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml')) -- Your workflow ID here
|
||||
WHERE conclusion = 'failure'
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 1
|
||||
),
|
||||
failed_jobs_with_logs AS (
|
||||
-- Step 2: Get all failed jobs and their logs for that run
|
||||
SELECT
|
||||
r.run_id,
|
||||
r.workflow_name,
|
||||
r.head_branch,
|
||||
r.head_sha,
|
||||
r.created_at,
|
||||
r.run_url,
|
||||
j.job_id,
|
||||
j.job_name,
|
||||
j.job_status,
|
||||
j.job_conclusion,
|
||||
j.job_url,
|
||||
j.failed_steps,
|
||||
j.logs
|
||||
FROM latest_failed_run r
|
||||
CROSS JOIN TABLE(github_actions.tf_failed_jobs_with_logs('your-org', 'your-repo', r.run_id::TEXT)) j
|
||||
)
|
||||
SELECT
|
||||
run_id,
|
||||
workflow_name,
|
||||
head_branch,
|
||||
created_at,
|
||||
run_url,
|
||||
job_name,
|
||||
job_url,
|
||||
-- Extract key error information from logs
|
||||
CASE
|
||||
WHEN CONTAINS(logs, 'npm ERR!') THEN 'NPM Error'
|
||||
WHEN CONTAINS(logs, 'fatal:') THEN 'Git Error'
|
||||
WHEN CONTAINS(logs, 'Error: Process completed with exit code') THEN 'Process Exit Error'
|
||||
WHEN CONTAINS(logs, 'timeout') THEN 'Timeout Error'
|
||||
ELSE 'Other Error'
|
||||
END as error_type,
|
||||
-- Get first error line from logs
|
||||
REGEXP_SUBSTR(logs, '.*Error[^\\n]*', 1, 1) as first_error_line,
|
||||
-- Full logs for detailed analysis
|
||||
logs as full_logs
|
||||
FROM failed_jobs_with_logs
|
||||
ORDER BY job_name;
|
||||
```
|
||||
|
||||
#### Quick Workflow ID to Run ID Lookup
|
||||
```sql
|
||||
-- Simple: Just get run IDs for a specific workflow
|
||||
SELECT
|
||||
id as run_id,
|
||||
status,
|
||||
conclusion,
|
||||
head_branch,
|
||||
created_at,
|
||||
html_url
|
||||
FROM TABLE(github_actions.tf_workflow_runs('your-org', 'your-repo', 'ci.yml')) -- Replace with your workflow ID
|
||||
WHERE conclusion = 'failure'
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 5;
|
||||
```
|
||||
|
||||
#### Failed Steps Deep Dive
|
||||
```sql
|
||||
-- Analyze failed steps within jobs and extract error patterns
|
||||
WITH job_details AS (
|
||||
SELECT
|
||||
id as job_id,
|
||||
name as job_name,
|
||||
conclusion,
|
||||
steps,
|
||||
github_actions.job_logs('your-org', 'your-repo', id::TEXT) as logs
|
||||
FROM TABLE(github_actions.tf_workflow_run_jobs('your-org', 'your-repo', '12345678'))
|
||||
WHERE conclusion = 'failure'
|
||||
),
|
||||
failed_steps AS (
|
||||
SELECT
|
||||
job_id,
|
||||
job_name,
|
||||
step.value:name::STRING as step_name,
|
||||
step.value:conclusion::STRING as step_conclusion,
|
||||
step.value:number::INTEGER as step_number,
|
||||
logs
|
||||
FROM job_details,
|
||||
LATERAL FLATTEN(input => steps) step
|
||||
WHERE step.value:conclusion::STRING = 'failure'
|
||||
)
|
||||
SELECT
|
||||
job_name,
|
||||
step_name,
|
||||
step_number,
|
||||
step_conclusion,
|
||||
-- Extract error messages from logs (first 1000 chars)
|
||||
SUBSTR(logs, GREATEST(1, CHARINDEX('Error:', logs) - 50), 1000) as error_context,
|
||||
-- Extract common error patterns
|
||||
CASE
|
||||
WHEN CONTAINS(logs, 'npm ERR!') THEN 'NPM Error'
|
||||
WHEN CONTAINS(logs, 'fatal:') THEN 'Git Error'
|
||||
WHEN CONTAINS(logs, 'Error: Process completed with exit code') THEN 'Process Exit Error'
|
||||
WHEN CONTAINS(logs, 'timeout') THEN 'Timeout Error'
|
||||
WHEN CONTAINS(logs, 'permission denied') THEN 'Permission Error'
|
||||
ELSE 'Other Error'
|
||||
END as error_category
|
||||
FROM failed_steps
|
||||
ORDER BY job_name, step_number;
|
||||
```
|
||||
|
||||
### Workflow Performance Metrics
|
||||
|
||||
```sql
|
||||
-- Average workflow duration by branch
|
||||
SELECT
|
||||
head_branch,
|
||||
AVG(DATEDIFF(second, run_started_at, updated_at)) as avg_duration_seconds,
|
||||
COUNT(*) as run_count,
|
||||
COUNT(CASE WHEN conclusion = 'success' THEN 1 END) as success_count
|
||||
FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 200}))
|
||||
WHERE run_started_at IS NOT NULL
|
||||
AND updated_at IS NOT NULL
|
||||
AND status = 'completed'
|
||||
AND created_at >= CURRENT_DATE - 30
|
||||
GROUP BY head_branch
|
||||
ORDER BY avg_duration_seconds DESC;
|
||||
```
|
||||
|
||||
### Automated Workflow Management
|
||||
|
||||
```sql
|
||||
-- Conditionally trigger deployment based on main branch success
|
||||
WITH latest_main_run AS (
|
||||
SELECT
|
||||
id,
|
||||
conclusion,
|
||||
head_sha,
|
||||
created_at
|
||||
FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {
|
||||
'branch': 'main',
|
||||
'per_page': 1
|
||||
}))
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 1
|
||||
)
|
||||
SELECT
|
||||
CASE
|
||||
WHEN conclusion = 'success' THEN
|
||||
github_actions.workflow_dispatches('your-org', 'your-repo', 'deploy.yml', {
|
||||
'ref': 'main',
|
||||
'inputs': {'sha': head_sha}
|
||||
})
|
||||
ELSE
|
||||
OBJECT_CONSTRUCT('skipped', true, 'reason', 'main branch tests failed')
|
||||
END as deployment_result
|
||||
FROM latest_main_run;
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
All functions return structured responses with error information:
|
||||
|
||||
```sql
|
||||
-- Check for API errors
|
||||
WITH api_response AS (
|
||||
SELECT github_actions.workflows('invalid-org', 'invalid-repo') as response
|
||||
)
|
||||
SELECT
|
||||
response:status_code as status_code,
|
||||
response:error as error_message,
|
||||
response:data as data
|
||||
FROM api_response;
|
||||
```
|
||||
|
||||
Common HTTP status codes:
|
||||
- **200**: Success
|
||||
- **401**: Unauthorized (check token permissions)
|
||||
- **403**: Forbidden (check repository access)
|
||||
- **404**: Not found (check org/repo/workflow names)
|
||||
- **422**: Validation failed (check input parameters)
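
A simple guard pattern keeps downstream queries from consuming error payloads; a minimal sketch reusing the response shape shown above:

```sql
WITH api_response AS (
    SELECT github_actions.workflows('your-org', 'your-repo') as response
)
SELECT
    IFF(response:status_code = 200, response:data, NULL) as payload,
    IFF(response:status_code <> 200, response:error, NULL) as failure
FROM api_response;
```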
|
||||
|
||||
## Rate Limiting
|
||||
|
||||
GitHub API has rate limits:
|
||||
- **Personal tokens**: 5,000 requests per hour
|
||||
- **GitHub App tokens**: 5,000 requests per hour per installation
|
||||
- **Search API**: 30 requests per minute
|
||||
|
||||
The functions automatically handle rate limiting through Livequery's retry mechanisms.
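
If you want to track your remaining quota explicitly, the generic GET helper can hit the rate-limit endpoint. A minimal sketch, assuming `github_utils.get(path, query)` proxies the REST API and wraps the payload under `data` like the workflow helpers do:

```sql
-- Hypothetical quota check via the generic helper
SELECT
    response:data:resources:core:remaining::NUMBER as core_remaining,
    TO_TIMESTAMP(response:data:resources:core:reset::NUMBER) as core_resets_at
FROM (SELECT github_utils.get('rate_limit', {}) as response);
```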
|
||||
|
||||
## Security Best Practices
|
||||
|
||||
1. **Use minimal permissions**: Only grant necessary scopes to tokens
|
||||
2. **Rotate tokens regularly**: Set expiration dates and rotate tokens
|
||||
3. **Use GitHub Apps for production**: More secure than personal access tokens
|
||||
4. **Monitor usage**: Track API calls to avoid rate limits
|
||||
5. **Secure storage**: Use proper secrets management for tokens
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
**Authentication Errors (401)**
|
||||
```sql
|
||||
-- Test authentication
|
||||
SELECT github_utils.octocat();
|
||||
-- Should return status_code = 200 if token is valid
|
||||
```
|
||||
|
||||
**Permission Errors (403)**
|
||||
- Ensure token has required scopes (`actions:read` minimum)
|
||||
- Check if repository is accessible to the token owner
|
||||
- For private repos, ensure `repo` scope is granted
|
||||
|
||||
**Workflow Not Found (404)**
|
||||
```sql
|
||||
-- List available workflows first
|
||||
SELECT * FROM TABLE(github_actions.tf_workflows('your-org', 'your-repo'));
|
||||
```
|
||||
|
||||
**Rate Limiting (403 with rate limit message)**
|
||||
- Implement request spacing in your queries
|
||||
- Use pagination parameters to reduce request frequency
|
||||
- Monitor your rate limit status
|
||||
|
||||
### Performance Tips
|
||||
|
||||
1. **Use table functions for analytics**: More efficient for large datasets
|
||||
2. **Implement pagination**: Use `per_page` parameter to control response size
|
||||
3. **Cache results**: Store frequently accessed data in tables (see the sketch after this list)
|
||||
4. **Filter at API level**: Use query parameters instead of SQL WHERE clauses
|
||||
5. **Batch operations**: Combine multiple API calls where possible
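
For tip 3, a cache can be as simple as materializing a table function's output on a schedule; a sketch, where `analytics.github_runs_cache` is a hypothetical table you own:

```sql
-- Refresh a local cache of recent runs instead of re-hitting the API
CREATE OR REPLACE TABLE analytics.github_runs_cache AS
SELECT *
FROM TABLE(github_actions.tf_runs('your-org', 'your-repo', {'per_page': 100}));
```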
|
||||
|
||||
## GitHub API Documentation
|
||||
|
||||
- [GitHub REST API](https://docs.github.com/en/rest) - Complete API reference
|
||||
- [Actions API](https://docs.github.com/en/rest/actions) - Actions-specific endpoints
|
||||
- [Authentication](https://docs.github.com/en/rest/overview/authenticating-to-the-rest-api) - Token setup and permissions
|
||||
- [Rate Limiting](https://docs.github.com/en/rest/overview/rate-limits-for-the-rest-api) - API limits and best practices
|
||||
|
||||
## Function Summary
|
||||
|
||||
| Function | Type | Purpose |
|
||||
|----------|------|---------|
|
||||
| `github_utils.octocat()` | UDF | Test API connectivity |
|
||||
| `github_utils.get/post/put()` | UDF | Generic API requests |
|
||||
| `github_actions.workflows()` | UDF | List workflows (JSON) |
|
||||
| `github_actions.runs()` | UDF | List runs (JSON) |
|
||||
| `github_actions.workflow_runs()` | UDF | List workflow runs (JSON) |
|
||||
| `github_actions.workflow_dispatches()` | UDF | Trigger workflows |
|
||||
| `github_actions.workflow_enable/disable()` | UDF | Control workflow state |
|
||||
| `github_actions.*_logs()` | UDF | Retrieve logs |
|
||||
| `github_actions.tf_*()` | UDTF | Structured table data |
|
||||
| `github_actions.tf_failed_jobs_with_logs()` | UDTF | Failed job analysis |
|
||||
|
||||
Ready to monitor and automate your GitHub Actions workflows directly from your data warehouse!
|
||||
@ -8,7 +8,7 @@
|
||||
- [repo, "TEXT"]
|
||||
- [query, "OBJECT"]
|
||||
return_type:
|
||||
- "OBJECT"
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
|
||||
sql: |
|
||||
@ -16,13 +16,13 @@
|
||||
{{ utils_schema_name }}.GET(
|
||||
CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows'),
|
||||
query
|
||||
):data::OBJECT
|
||||
):data::VARIANT
|
||||
- name: {{ schema_name -}}.workflows
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
return_type:
|
||||
- "OBJECT"
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
|
||||
sql: |
|
||||
@ -35,7 +35,7 @@
|
||||
- [repo, "TEXT"]
|
||||
- [query, "OBJECT"]
|
||||
return_type:
|
||||
- "OBJECT"
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
|
||||
sql: |
|
||||
@ -43,13 +43,13 @@
|
||||
{{ utils_schema_name }}.GET(
|
||||
CONCAT_WS('/', 'repos', owner, repo, 'actions/runs'),
|
||||
query
|
||||
):data::OBJECT
|
||||
):data::VARIANT
|
||||
- name: {{ schema_name -}}.runs
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
return_type:
|
||||
- "OBJECT"
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
|
||||
sql: |
|
||||
@ -63,7 +63,7 @@
|
||||
- [workflow_id, "TEXT"]
|
||||
- [query, "OBJECT"]
|
||||
return_type:
|
||||
- "OBJECT"
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$
|
||||
sql: |
|
||||
@ -71,14 +71,14 @@
|
||||
{{ utils_schema_name }}.GET(
|
||||
CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'runs'),
|
||||
query
|
||||
):data::OBJECT
|
||||
):data::VARIANT
|
||||
- name: {{ schema_name -}}.workflow_runs
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
- [workflow_id, "TEXT"]
|
||||
return_type:
|
||||
- "OBJECT"
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$
|
||||
sql: |
|
||||
@ -92,7 +92,7 @@
|
||||
- [workflow_id, "TEXT"]
|
||||
- [body, "OBJECT"]
|
||||
return_type:
|
||||
- "OBJECT"
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#create-a-workflow-dispatch-event).$$
|
||||
sql: |
|
||||
@ -100,7 +100,7 @@
|
||||
{{ utils_schema_name }}.POST(
|
||||
CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'dispatches'),
|
||||
COALESCE(body, {'ref': 'main'})::OBJECT
|
||||
)::OBJECT
|
||||
)::VARIANT
|
||||
|
||||
- name: {{ schema_name -}}.workflow_dispatches
|
||||
signature:
|
||||
@ -108,7 +108,7 @@
|
||||
- [repo, "TEXT"]
|
||||
- [workflow_id, "TEXT"]
|
||||
return_type:
|
||||
- "OBJECT"
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#create-a-workflow-dispatch-event).$$
|
||||
sql: |
|
||||
@ -121,7 +121,7 @@
|
||||
- [repo, "TEXT"]
|
||||
- [workflow_id, "TEXT"]
|
||||
return_type:
|
||||
- "OBJECT"
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Enables a workflow. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/reference/actions#enable-a-workflow).$$
|
||||
sql: |
|
||||
@ -129,14 +129,14 @@
|
||||
{{ utils_schema_name }}.PUT(
|
||||
CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'enable'),
|
||||
{}
|
||||
)::OBJECT
|
||||
)::VARIANT
|
||||
- name: {{ schema_name -}}.workflow_disable
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
- [workflow_id, "TEXT"]
|
||||
return_type:
|
||||
- "OBJECT"
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Disables a workflow. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/reference/actions#disable-a-workflow).$$
|
||||
sql: |
|
||||
@ -144,5 +144,67 @@
|
||||
{{ utils_schema_name }}.PUT(
|
||||
CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'disable'),
|
||||
{}
|
||||
)::OBJECT
|
||||
)::VARIANT
|
||||
|
||||
- name: {{ schema_name -}}.workflow_run_logs
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
- [run_id, "TEXT"]
|
||||
return_type:
|
||||
- "TEXT"
|
||||
options: |
|
||||
COMMENT = $$Download workflow run logs as a ZIP archive. Gets a redirect URL to the actual log archive. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#download-workflow-run-logs).$$
|
||||
sql: |
|
||||
SELECT
|
||||
{{ utils_schema_name }}.GET(
|
||||
CONCAT_WS('/', 'repos', owner, repo, 'actions/runs', run_id, 'logs'),
|
||||
{}
|
||||
):data::TEXT
|
||||
|
||||
- name: {{ schema_name -}}.job_logs
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
- [job_id, "TEXT"]
|
||||
return_type:
|
||||
- "TEXT"
|
||||
options: |
|
||||
COMMENT = $$Download job logs. Gets the plain text logs for a specific job. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#download-job-logs-for-a-workflow-run).$$
|
||||
sql: |
|
||||
SELECT
|
||||
{{ utils_schema_name }}.GET(
|
||||
CONCAT_WS('/', 'repos', owner, repo, 'actions/jobs', job_id, 'logs'),
|
||||
{}
|
||||
):data::TEXT
|
||||
|
||||
- name: {{ schema_name -}}.workflow_run_jobs
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
- [run_id, "TEXT"]
|
||||
- [query, "OBJECT"]
|
||||
return_type:
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Lists jobs for a workflow run. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$
|
||||
sql: |
|
||||
SELECT
|
||||
{{ utils_schema_name }}.GET(
|
||||
CONCAT_WS('/', 'repos', owner, repo, 'actions/runs', run_id, 'jobs'),
|
||||
query
|
||||
):data::VARIANT
|
||||
- name: {{ schema_name -}}.workflow_run_jobs
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
- [run_id, "TEXT"]
|
||||
return_type:
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Lists jobs for a workflow run. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$
|
||||
sql: |
|
||||
SELECT
|
||||
{{ schema_name -}}.workflow_run_jobs(owner, repo, run_id, {})
|
||||
|
||||
{% endmacro %}
|
||||
@ -166,4 +166,238 @@
|
||||
SELECT *
|
||||
FROM TABLE({{ schema_name -}}.tf_workflow_runs(owner, repo, WORKFLOW_ID, {}))
|
||||
|
||||
{% endmacro %}
|
||||
- name: {{ schema_name -}}.tf_workflow_run_jobs
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
- [run_id, "TEXT"]
|
||||
- [query, "OBJECT"]
|
||||
return_type:
|
||||
- "TABLE(id NUMBER, run_id NUMBER, workflow_name STRING, head_branch STRING, run_url STRING, run_attempt NUMBER, node_id STRING, head_sha STRING, url STRING, html_url STRING, status STRING, conclusion STRING, created_at TIMESTAMP, started_at TIMESTAMP, completed_at TIMESTAMP, name STRING, check_run_url STRING, labels VARIANT, runner_id NUMBER, runner_name STRING, runner_group_id NUMBER, runner_group_name STRING, steps VARIANT)"
|
||||
options: |
|
||||
COMMENT = $$Lists jobs for a workflow run as a table. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$
|
||||
sql: |
|
||||
WITH response AS
|
||||
(
|
||||
SELECT
|
||||
github_actions.workflow_run_jobs(OWNER, REPO, RUN_ID, QUERY) AS response
|
||||
)
|
||||
SELECT
|
||||
value:id::NUMBER AS id
|
||||
,value:run_id::NUMBER AS run_id
|
||||
,value:workflow_name::STRING AS workflow_name
|
||||
,value:head_branch::STRING AS head_branch
|
||||
,value:run_url::STRING AS run_url
|
||||
,value:run_attempt::NUMBER AS run_attempt
|
||||
,value:node_id::STRING AS node_id
|
||||
,value:head_sha::STRING AS head_sha
|
||||
,value:url::STRING AS url
|
||||
,value:html_url::STRING AS html_url
|
||||
,value:status::STRING AS status
|
||||
,value:conclusion::STRING AS conclusion
|
||||
,value:created_at::TIMESTAMP AS created_at
|
||||
,value:started_at::TIMESTAMP AS started_at
|
||||
,value:completed_at::TIMESTAMP AS completed_at
|
||||
,value:name::STRING AS name
|
||||
,value:check_run_url::STRING AS check_run_url
|
||||
,value:labels::VARIANT AS labels
|
||||
,value:runner_id::NUMBER AS runner_id
|
||||
,value:runner_name::STRING AS runner_name
|
||||
,value:runner_group_id::NUMBER AS runner_group_id
|
||||
,value:runner_group_name::STRING AS runner_group_name
|
||||
,value:steps::VARIANT AS steps
|
||||
FROM response, LATERAL FLATTEN( input=> response:jobs)
|
||||
|
||||
- name: {{ schema_name -}}.tf_workflow_run_jobs
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
- [run_id, "TEXT"]
|
||||
return_type:
|
||||
- "TABLE(id NUMBER, run_id NUMBER, workflow_name STRING, head_branch STRING, run_url STRING, run_attempt NUMBER, node_id STRING, head_sha STRING, url STRING, html_url STRING, status STRING, conclusion STRING, created_at TIMESTAMP, started_at TIMESTAMP, completed_at TIMESTAMP, name STRING, check_run_url STRING, labels VARIANT, runner_id NUMBER, runner_name STRING, runner_group_id NUMBER, runner_group_name STRING, steps VARIANT)"
|
||||
options: |
|
||||
COMMENT = $$Lists jobs for a workflow run as a table. [Docs](https://docs.github.com/en/rest/actions/workflow-jobs?apiVersion=2022-11-28#list-jobs-for-a-workflow-run).$$
|
||||
sql: |
|
||||
SELECT *
|
||||
FROM TABLE({{ schema_name -}}.tf_workflow_run_jobs(owner, repo, run_id, {}))
|
||||
|
||||
- name: {{ schema_name -}}.tf_failed_jobs_with_logs
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
- [run_id, "TEXT"]
|
||||
return_type:
|
||||
- "TABLE(run_id STRING, job_id NUMBER, job_name STRING, job_status STRING, job_conclusion STRING, job_url STRING, failed_steps VARIANT, logs TEXT, failed_step_logs ARRAY)"
|
||||
options: |
|
||||
COMMENT = $$Gets failed jobs for a workflow run with their complete logs. Combines job info with log content for analysis.$$
|
||||
sql: |
|
||||
WITH failed_jobs AS (
|
||||
SELECT
|
||||
run_id::STRING AS run_id,
|
||||
id AS job_id,
|
||||
name AS job_name,
|
||||
status AS job_status,
|
||||
conclusion AS job_conclusion,
|
||||
html_url AS job_url,
|
||||
steps AS failed_steps
|
||||
FROM TABLE({{ schema_name -}}.tf_workflow_run_jobs(owner, repo, run_id))
|
||||
WHERE conclusion = 'failure'
|
||||
),
|
||||
jobs_with_logs AS (
|
||||
SELECT
|
||||
run_id,
|
||||
job_id,
|
||||
job_name,
|
||||
job_status,
|
||||
job_conclusion,
|
||||
job_url,
|
||||
failed_steps,
|
||||
{{ schema_name -}}.job_logs(owner, repo, job_id::TEXT) AS logs
|
||||
FROM failed_jobs
|
||||
),
|
||||
error_sections AS (
|
||||
SELECT
|
||||
run_id,
|
||||
job_id,
|
||||
job_name,
|
||||
job_status,
|
||||
job_conclusion,
|
||||
job_url,
|
||||
failed_steps,
|
||||
logs,
|
||||
ARRAY_AGG(section.value) AS failed_step_logs
|
||||
FROM jobs_with_logs,
|
||||
LATERAL FLATTEN(INPUT => SPLIT(logs, '##[group]')) section
|
||||
WHERE CONTAINS(section.value, '##[error]')
|
||||
GROUP BY run_id, job_id, job_name, job_status, job_conclusion, job_url, failed_steps, logs
|
||||
)
|
||||
SELECT
j.run_id,
j.job_id,
j.job_name,
j.job_status,
j.job_conclusion,
j.job_url,
j.failed_steps,
j.logs,
COALESCE(e.failed_step_logs, ARRAY_CONSTRUCT()) AS failed_step_logs
FROM jobs_with_logs j
LEFT JOIN error_sections e
ON j.run_id = e.run_id
AND j.job_id = e.job_id
|
||||
|
||||
- name: {{ schema_name -}}.tf_failure_analysis_with_ai
|
||||
signature:
|
||||
- [owner, "TEXT"]
|
||||
- [repo, "TEXT"]
|
||||
- [run_id, "TEXT"]
|
||||
- [enable_ai, "BOOLEAN"]
|
||||
- [ai_provider, "TEXT"]
|
||||
- [groq_api_key, "TEXT"]
|
||||
- [groq_model, "TEXT"]
|
||||
return_type:
|
||||
- "TABLE(run_id STRING, ai_analysis STRING, total_failures NUMBER, failure_metadata VARIANT)"
|
||||
options: |
|
||||
COMMENT = $$Gets GitHub Actions failure analysis with configurable AI providers (cortex, claude, groq) for Slack notifications.$$
|
||||
sql: |
|
||||
WITH failure_data AS (
|
||||
SELECT
|
||||
run_id,
|
||||
COUNT(*) as total_failures,
|
||||
ARRAY_AGG(OBJECT_CONSTRUCT(
|
||||
'run_id', run_id,
|
||||
'job_name', job_name,
|
||||
'job_id', job_id,
|
||||
'job_url', job_url,
|
||||
'error_sections', ARRAY_SIZE(failed_step_logs),
|
||||
'logs_preview', SUBSTR(ARRAY_TO_STRING(failed_step_logs, '\n'), 1, 500)
|
||||
)) as failure_metadata,
|
||||
CASE
|
||||
WHEN NOT enable_ai THEN NULL
|
||||
WHEN LOWER(COALESCE(ai_provider, 'cortex')) = 'cortex' THEN
|
||||
snowflake.cortex.complete(
|
||||
'mistral-large',
|
||||
CONCAT(
|
||||
'Analyze these ', COUNT(*), ' GitHub Actions failures for run ', run_id, ' and provide:\n',
|
||||
'1. Common failure patterns\n',
|
||||
'2. Root cause analysis\n',
|
||||
'3. Prioritized action items\n\n',
|
||||
LISTAGG(
|
||||
CONCAT(
|
||||
'Job: ', job_name, '\n',
|
||||
'Job ID: ', job_id, '\n',
|
||||
'Run ID: ', run_id, '\n',
|
||||
'Error: ', ARRAY_TO_STRING(failed_step_logs, '\n')
|
||||
),
|
||||
'\n\n---\n\n'
|
||||
) WITHIN GROUP (ORDER BY job_name)
|
||||
)
|
||||
)
|
||||
WHEN LOWER(ai_provider) = 'claude' THEN
|
||||
(
|
||||
SELECT COALESCE(
|
||||
response:content[0]:text::STRING,
|
||||
response:error:message::STRING,
|
||||
'Claude analysis failed'
|
||||
)
|
||||
FROM (
|
||||
SELECT claude.post_messages(
|
||||
ARRAY_CONSTRUCT(
|
||||
OBJECT_CONSTRUCT(
|
||||
'role', 'user',
|
||||
'content', CONCAT(
|
||||
'Analyze these ', COUNT(*), ' GitHub Actions failures for run ', run_id, ' and provide:\n',
|
||||
'1. Common failure patterns\n',
|
||||
'2. Root cause analysis\n',
|
||||
'3. Prioritized action items\n\n',
|
||||
LISTAGG(
|
||||
CONCAT(
|
||||
'Job: ', job_name, '\n',
|
||||
'Job ID: ', job_id, '\n',
|
||||
'Run ID: ', run_id, '\n',
|
||||
'Error: ', SUBSTR(ARRAY_TO_STRING(failed_step_logs, '\n'), 1, 2000)
|
||||
),
|
||||
'\n\n---\n\n'
|
||||
) WITHIN GROUP (ORDER BY job_name)
|
||||
)
|
||||
)
|
||||
)
|
||||
) as response
|
||||
)
|
||||
)
|
||||
WHEN LOWER(ai_provider) = 'groq' THEN
|
||||
(
|
||||
SELECT groq.extract_response_text(
|
||||
groq.quick_chat(
|
||||
CONCAT(
|
||||
'Analyze these ', COUNT(*), ' GitHub Actions failures for run ', run_id, ' and provide:\n',
|
||||
'1. Common failure patterns\n',
|
||||
'2. Root cause analysis\n',
|
||||
'3. Prioritized action items\n\n',
|
||||
LISTAGG(
|
||||
CONCAT(
|
||||
'Job: ', job_name, '\n',
|
||||
'Job ID: ', job_id, '\n',
|
||||
'Run ID: ', run_id, '\n',
|
||||
'Error: ', SUBSTR(ARRAY_TO_STRING(failed_step_logs, '\n'), 1, 2000)
|
||||
),
|
||||
'\n\n---\n\n'
|
||||
) WITHIN GROUP (ORDER BY job_name)
|
||||
),
|
||||
groq_api_key,
|
||||
COALESCE(groq_model, 'llama3-8b-8192')
|
||||
)
|
||||
)
|
||||
)
|
||||
ELSE
|
||||
CONCAT('Unsupported AI provider: ', COALESCE(ai_provider, 'null'))
|
||||
END as ai_analysis
|
||||
FROM TABLE({{ schema_name -}}.tf_failed_jobs_with_logs(owner, repo, run_id))
|
||||
GROUP BY run_id, enable_ai, ai_provider, groq_api_key, groq_model
|
||||
)
|
||||
SELECT
|
||||
run_id::STRING,
|
||||
ai_analysis::STRING,
|
||||
total_failures,
|
||||
failure_metadata
|
||||
FROM failure_data
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
265
macros/marketplace/groq/README.md
Normal file
265
macros/marketplace/groq/README.md
Normal file
@ -0,0 +1,265 @@
|
||||
# Groq API Integration
|
||||
|
||||
This directory contains Snowflake UDFs for integrating with the Groq API, providing fast inference with various open-source language models.
|
||||
|
||||
## Available Models
|
||||
|
||||
- **llama3-8b-8192**: Meta Llama 3 8B model with 8K context (Very Fast)
|
||||
- **llama3-70b-8192**: Meta Llama 3 70B model with 8K context (Fast, better quality)
|
||||
- **gemma-7b-it**: Google Gemma 7B instruction-tuned (Instruction following)
|
||||
|
||||
**Note**: Check [Groq's documentation](https://console.groq.com/docs/models) for the latest available models, or query the live model list with:
|
||||
|
||||
```sql
|
||||
-- Get current list of available models
|
||||
SELECT groq_utils.list_models();
|
||||
|
||||
-- Get details about a specific model
|
||||
SELECT groq_utils.get_model_info('llama3-8b-8192');
|
||||
```
|
||||
|
||||
## Setup
|
||||
|
||||
1. Get your Groq API key from [https://console.groq.com/keys](https://console.groq.com/keys)
|
||||
|
||||
2. Store the API key in Snowflake secrets:
|
||||
- **System users**: Store under `_FSC_SYS/GROQ`
|
||||
- **Regular users**: Store under `vault/groq/api`
|
||||
|
||||
3. Deploy the Groq marketplace functions:
|
||||
```bash
|
||||
dbt run --models groq__ groq_utils__groq_utils
|
||||
```
|
||||
|
||||
**Note**: Groq functions automatically use the appropriate secret path based on your user type.
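
Once deployed, a quick smoke test confirms the key and secret path are wired up:

```sql
-- Should return the model catalog, not an auth error
SELECT groq_utils.list_models();
```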
|
||||
|
||||
## Functions
|
||||
|
||||
### `groq.chat_completions(messages, [model], [max_tokens], [temperature], [top_p], [frequency_penalty], [presence_penalty])`
|
||||
|
||||
Send messages to Groq for chat completion.
|
||||
|
||||
### `groq.quick_chat(user_message, [system_message])`
|
||||
|
||||
Quick single or system+user message chat.
|
||||
|
||||
**Note**: All functions authenticate with the Groq API key stored in Snowflake secrets (see Setup above).
|
||||
|
||||
### `groq.extract_response_text(groq_response)`
|
||||
|
||||
Extract text content from Groq API responses.
|
||||
|
||||
### `groq_utils.post(path, body)`
|
||||
|
||||
Low-level HTTP POST to Groq API endpoints.
|
||||
|
||||
### `groq_utils.get(path)`
|
||||
|
||||
Low-level HTTP GET to Groq API endpoints.
|
||||
|
||||
### `groq_utils.list_models()`
|
||||
|
||||
List all available models from Groq API.
|
||||
|
||||
### `groq_utils.get_model_info(model_id)`
|
||||
|
||||
Get information about a specific model.
|
||||
|
||||
## Examples
|
||||
|
||||
### Basic Chat
|
||||
```sql
|
||||
-- Simple chat with default model (llama3-8b-8192)
|
||||
SELECT groq.chat_completions(
|
||||
[{'role': 'user', 'content': 'Explain quantum computing in simple terms'}]
|
||||
);
|
||||
|
||||
-- Quick chat shorthand
|
||||
SELECT groq.quick_chat('What is the capital of France?');
|
||||
```
|
||||
|
||||
### Chat with System Prompt
|
||||
```sql
|
||||
-- Chat with system prompt using quick_chat
|
||||
SELECT groq.quick_chat(
|
||||
'How do I create a list comprehension?',
'You are a helpful Python programming assistant.'
|
||||
);
|
||||
|
||||
-- Full chat_completions with system message
|
||||
SELECT groq.chat_completions(
|
||||
[
|
||||
{'role': 'system', 'content': 'You are a data scientist expert.'},
|
||||
{'role': 'user', 'content': 'Explain the difference between supervised and unsupervised learning'}
|
||||
]
|
||||
);
|
||||
```
|
||||
|
||||
### Different Models
|
||||
```sql
|
||||
-- Use the larger, more capable model
|
||||
SELECT groq.chat_completions(
|
||||
[{'role': 'user', 'content': 'Write a Python function to calculate fibonacci numbers'}],
|
||||
'llama3-70b-8192',
|
||||
500 -- max_tokens
|
||||
);
|
||||
|
||||
-- Larger model without an explicit token limit (defaults apply)
|
||||
SELECT groq.chat_completions(
|
||||
[{'role': 'user', 'content': 'Analyze this complex problem...'}],
|
||||
'llama3-70b-8192'
|
||||
);
|
||||
```
|
||||
|
||||
### Custom Parameters
|
||||
```sql
|
||||
-- Fine-tune response generation
|
||||
SELECT groq.chat_completions(
|
||||
[{'role': 'user', 'content': 'Generate creative story ideas'}],
|
||||
'llama3-8b-8192', -- model
|
||||
300, -- max_tokens
|
||||
0.8, -- temperature (more creative)
|
||||
0.9, -- top_p
|
||||
0.1, -- frequency_penalty (reduce repetition)
|
||||
0.1 -- presence_penalty (encourage new topics)
|
||||
);
|
||||
```
|
||||
|
||||
|
||||
### Extract Response Text
|
||||
```sql
|
||||
-- Get just the text content from API response
|
||||
WITH chat_response AS (
|
||||
SELECT groq.quick_chat('Hello, how are you?') as response
|
||||
)
|
||||
SELECT groq.extract_response_text(response) as message_text
|
||||
FROM chat_response;
|
||||
```
|
||||
|
||||
### Conversational Chat
|
||||
```sql
|
||||
-- Multi-turn conversation
|
||||
SELECT groq.chat_completions([
|
||||
{'role': 'system', 'content': 'You are a helpful coding assistant.'},
|
||||
{'role': 'user', 'content': 'I need help with SQL queries'},
|
||||
{'role': 'assistant', 'content': 'I\'d be happy to help with SQL! What specific query are you working on?'},
|
||||
{'role': 'user', 'content': 'How do I join two tables with a LEFT JOIN?'}
|
||||
]);
|
||||
```
|
||||
|
||||
### Model Comparison
|
||||
```sql
|
||||
-- Compare responses from different models
|
||||
WITH responses AS (
|
||||
SELECT
|
||||
'llama3-8b-8192' as model,
|
||||
groq.extract_response_text(
|
||||
groq.chat_completions([{'role': 'user', 'content': 'Explain machine learning'}], 'llama3-8b-8192', 100)
|
||||
) as response
|
||||
UNION ALL
|
||||
SELECT
|
||||
'llama3-70b-8192' as model,
|
||||
groq.extract_response_text(
|
||||
groq.chat_completions([{'role': 'user', 'content': 'Explain machine learning'}], 'llama3-70b-8192', 100)
|
||||
) as response
|
||||
)
|
||||
SELECT * FROM responses;
|
||||
```
|
||||
|
||||
### Batch Processing
|
||||
```sql
|
||||
-- Process multiple questions
|
||||
WITH questions AS (
|
||||
SELECT * FROM VALUES
|
||||
('What is Python?'),
|
||||
('What is JavaScript?'),
|
||||
('What is SQL?')
|
||||
AS t(question)
|
||||
)
|
||||
SELECT
|
||||
question,
|
||||
groq.extract_response_text(
|
||||
groq.quick_chat(question, 'You are a programming tutor.')
|
||||
) as answer
|
||||
FROM questions;
|
||||
```
|
||||
|
||||
### Get Available Models
|
||||
```sql
|
||||
-- List all available models with details
|
||||
SELECT
|
||||
model.value:id::STRING as model_id,
|
||||
model.value:object::STRING as object_type,
|
||||
model.value:created::INTEGER as created_timestamp,
|
||||
model.value:owned_by::STRING as owned_by
|
||||
FROM (
|
||||
SELECT groq_utils.list_models() as response
|
||||
),
|
||||
LATERAL FLATTEN(input => response:data) as model
|
||||
ORDER BY model_id;
|
||||
|
||||
-- Check if a specific model is available
|
||||
WITH models AS (
|
||||
SELECT groq_utils.list_models() as response
|
||||
)
|
||||
SELECT
|
||||
CASE
|
||||
WHEN ARRAY_CONTAINS('llama3-70b-8192'::VARIANT, ids)
THEN 'Model is available'
ELSE 'Model not found'
END as availability
FROM (
SELECT ARRAY_AGG(m.value:id) AS ids
FROM models, LATERAL FLATTEN(input => response:data) m
);
|
||||
```
|
||||
|
||||
### GitHub Actions Integration Example
|
||||
```sql
|
||||
-- Example of how this is used in GitHub Actions failure analysis
|
||||
SELECT
|
||||
run_id,
|
||||
groq.extract_response_text(
|
||||
groq.quick_chat(
|
||||
CONCAT('Analyze this failure: Job=', job_name, ' Error=', error_logs),
|
||||
'You are analyzing CI/CD failures. Provide concise root cause analysis.'
|
||||
)
|
||||
) as ai_analysis
|
||||
FROM my_failed_jobs
|
||||
WHERE run_id = '12345678';
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
The functions include built-in error handling. Check for errors in responses:
|
||||
|
||||
```sql
|
||||
WITH response AS (
|
||||
SELECT groq.quick_chat('Hello') as result
|
||||
)
|
||||
SELECT
|
||||
CASE
|
||||
WHEN result:error IS NOT NULL THEN result:error:message::STRING
|
||||
ELSE groq.extract_response_text(result)
|
||||
END as final_response
|
||||
FROM response;
|
||||
```
|
||||
|
||||
## Performance Tips
|
||||
|
||||
1. **Model Selection**: Use `llama3-8b-8192` for fast, simple tasks and `llama3-70b-8192` for complex reasoning
|
||||
2. **Token Limits**: Set appropriate `max_tokens` to control costs and response length
|
||||
3. **Temperature**: Use lower values (0.1-0.3) for factual tasks, higher (0.7-1.0) for creative tasks
|
||||
4. **Stay Updated**: Check Groq's model documentation regularly as they add new models and deprecate others
|
||||
|
||||
## Integration with GitHub Actions
|
||||
|
||||
This Groq integration is used by the GitHub Actions failure analysis system in `slack_notify` macro:
|
||||
|
||||
```sql
|
||||
-- In your GitHub Actions workflow
|
||||
dbt run-operation slack_notify --vars '{
|
||||
"owner": "your-org",
|
||||
"repo": "your-repo",
|
||||
"run_id": "12345678",
|
||||
"ai_provider": "groq",
|
||||
"enable_ai_analysis": true
|
||||
}'
|
||||
```
|
||||
55
macros/marketplace/groq/chat_udfs.yaml.sql
Normal file
55
macros/marketplace/groq/chat_udfs.yaml.sql
Normal file
@ -0,0 +1,55 @@
|
||||
{% macro config_groq_chat_udfs(schema_name = "groq", utils_schema_name = "groq_utils") -%}
|
||||
{#
|
||||
This macro is used to generate API calls to Groq chat completion endpoints
|
||||
#}
|
||||
|
||||
- name: {{ schema_name -}}.chat_completions
|
||||
signature:
|
||||
- [MESSAGES, ARRAY, Array of message objects]
|
||||
- [MODEL, STRING, The model to use (optional, defaults to llama3-8b-8192)]
|
||||
return_type:
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Send messages to Groq and get a chat completion response with optional model selection [API docs: Chat Completions](https://console.groq.com/docs/api-reference#chat-completions)$$
|
||||
sql: |
|
||||
SELECT groq_utils.post(
|
||||
'/openai/v1/chat/completions',
|
||||
{
|
||||
'model': COALESCE(MODEL, 'llama3-8b-8192'),
|
||||
'messages': MESSAGES,
|
||||
'max_tokens': 1024,
|
||||
'temperature': 0.1
|
||||
}
|
||||
) as response
|
||||
|
||||
- name: {{ schema_name -}}.quick_chat
|
||||
signature:
|
||||
- [USER_MESSAGE, STRING, The user message to send]
|
||||
- [MODEL, STRING, The model to use (optional, defaults to llama3-8b-8192)]
|
||||
return_type:
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Quick single message chat with Groq using optional model selection$$
|
||||
sql: |
|
||||
SELECT {{ schema_name }}.chat_completions(
|
||||
ARRAY_CONSTRUCT(
|
||||
OBJECT_CONSTRUCT('role', 'user', 'content', USER_MESSAGE)
|
||||
),
|
||||
MODEL
|
||||
) as response
|
||||
|
||||
- name: {{ schema_name -}}.extract_response_text
|
||||
signature:
|
||||
- [GROQ_RESPONSE, VARIANT, The response object from Groq API]
|
||||
return_type:
|
||||
- "STRING"
|
||||
options: |
|
||||
COMMENT = $$Extract the text content from a Groq chat completion response$$
|
||||
sql: |
|
||||
SELECT COALESCE(
|
||||
GROQ_RESPONSE:choices[0]:message:content::STRING,
|
||||
GROQ_RESPONSE:error:message::STRING,
|
||||
'No response available'
|
||||
)
|
||||
|
||||
{% endmacro %}
|
||||
64
macros/marketplace/groq/utils_udfs.yaml.sql
Normal file
64
macros/marketplace/groq/utils_udfs.yaml.sql
Normal file
@ -0,0 +1,64 @@
|
||||
{% macro config_groq_utils_udfs(schema_name = "groq_utils", utils_schema_name = "groq_utils") -%}
|
||||
{#
|
||||
This macro is used to generate API calls to Groq API endpoints
|
||||
#}
|
||||
- name: {{ schema_name -}}.post
|
||||
signature:
|
||||
- [PATH, STRING, The API endpoint path]
|
||||
- [BODY, OBJECT, The request body]
|
||||
return_type:
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Make POST requests to Groq API [API docs: Groq](https://console.groq.com/docs/api-reference)$$
|
||||
sql: |
|
||||
SELECT live.udf_api(
|
||||
'POST',
|
||||
CONCAT('https://api.groq.com', PATH),
|
||||
{
|
||||
'Authorization': 'Bearer {API_KEY}',
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
BODY,
|
||||
IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GROQ', 'Vault/prod/livequery/groq')
|
||||
) as response
|
||||
|
||||
- name: {{ schema_name -}}.get
|
||||
signature:
|
||||
- [PATH, STRING, The API endpoint path]
|
||||
return_type:
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Make GET requests to Groq API [API docs: Groq](https://console.groq.com/docs/api-reference)$$
|
||||
sql: |
|
||||
SELECT live.udf_api(
|
||||
'GET',
|
||||
CONCAT('https://api.groq.com', PATH),
|
||||
{
|
||||
'Authorization': 'Bearer {API_KEY}',
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
NULL,
|
||||
IFF(_utils.udf_whoami() <> CURRENT_USER(), '_FSC_SYS/GROQ', 'Vault/prod/livequery/groq')
|
||||
) as response
|
||||
|
||||
- name: {{ schema_name -}}.list_models
|
||||
signature:
|
||||
- []
|
||||
return_type:
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$List available models from Groq API$$
|
||||
sql: |
|
||||
SELECT {{ schema_name }}.get('/openai/v1/models')
|
||||
|
||||
- name: {{ schema_name -}}.get_model_info
|
||||
signature:
|
||||
- [MODEL_ID, STRING, The model ID to get info for]
|
||||
return_type:
|
||||
- "VARIANT"
|
||||
options: |
|
||||
COMMENT = $$Get information about a specific model$$
|
||||
sql: |
|
||||
SELECT {{ schema_name }}.get('/openai/v1/models/' || MODEL_ID)
|
||||
|
||||
{% endmacro %}
|
||||
44
macros/marketplace/helius/README.md
Normal file
44
macros/marketplace/helius/README.md
Normal file
@ -0,0 +1,44 @@
|
||||
# Helius API Integration
|
||||
|
||||
Helius provides high-performance Solana RPC infrastructure and enhanced APIs for accessing Solana blockchain data, including DAS (Digital Asset Standard) APIs.
|
||||
|
||||
## Setup
|
||||
|
||||
1. Get your Helius API key from [Helius Dashboard](https://dashboard.helius.dev/)
|
||||
|
||||
2. Store the API key in Snowflake secrets under `_FSC_SYS/HELIUS`
|
||||
|
||||
3. Deploy the Helius marketplace functions:
|
||||
```bash
|
||||
dbt run --models helius__ helius_utils__helius_utils
|
||||
```
|
||||
|
||||
## Functions
|
||||
|
||||
### `helius.get(path, query_args)`
|
||||
Make GET requests to Helius API endpoints.
|
||||
|
||||
### `helius.post(path, body)`
|
||||
Make POST requests to Helius API endpoints.
|
||||
|
||||
## Examples
|
||||
|
||||
```sql
|
||||
-- Get Solana account info
|
||||
SELECT helius.post('/rpc', {
|
||||
'jsonrpc': '2.0',
|
||||
'method': 'getAccountInfo',
|
||||
'params': ['account_address'],
|
||||
'id': 1
|
||||
});
|
||||
|
||||
-- Get compressed NFTs by owner
|
||||
SELECT helius.get('/v0/addresses/owner_address/nfts', {'compressed': true});
|
||||
|
||||
-- Get transaction history
|
||||
SELECT helius.get('/v0/addresses/address/transactions', {'limit': 100});
|
||||
```
|
||||
|
||||
## API Documentation
|
||||
|
||||
- [Helius API Documentation](https://docs.helius.dev/)
|
||||
36
macros/marketplace/nftscan/README.md
Normal file
36
macros/marketplace/nftscan/README.md
Normal file
@ -0,0 +1,36 @@
|
||||
# NFTScan API Integration
|
||||
|
||||
NFTScan is a professional NFT data infrastructure platform providing comprehensive NFT APIs for accessing NFT metadata, transactions, and market data across multiple blockchains.
|
||||
|
||||
## Setup
|
||||
|
||||
1. Get your NFTScan API key from [NFTScan Developer Portal](https://developer.nftscan.com/)
|
||||
|
||||
2. Store the API key in Snowflake secrets under `_FSC_SYS/NFTSCAN`
|
||||
|
||||
3. Deploy the NFTScan marketplace functions:
|
||||
```bash
|
||||
dbt run --models nftscan__ nftscan_utils__nftscan_utils
|
||||
```
|
||||
|
||||
## Functions
|
||||
|
||||
### `nftscan.get(path, query_args)`
|
||||
Make GET requests to NFTScan API endpoints.
|
||||
|
||||
## Examples
|
||||
|
||||
```sql
|
||||
-- Get NFT collection statistics
|
||||
SELECT nftscan.get('/api/v2/statistics/collection/eth/0x...', {});
|
||||
|
||||
-- Get NFTs owned by an address
|
||||
SELECT nftscan.get('/api/v2/account/own/eth/0x...', {'show_attribute': 'true', 'limit': 100});
|
||||
|
||||
-- Get NFT transaction history
|
||||
SELECT nftscan.get('/api/v2/transactions/account/eth/0x...', {'event_type': 'Sale', 'limit': 50});
|
||||
```
|
||||
|
||||
## API Documentation
|
||||
|
||||
- [NFTScan API Documentation](https://developer.nftscan.com/)
|
||||
39
macros/marketplace/opensea/README.md
Normal file
39
macros/marketplace/opensea/README.md
Normal file
@ -0,0 +1,39 @@
|
||||
# OpenSea API Integration
|
||||
|
||||
OpenSea is the world's largest NFT marketplace, providing APIs for accessing NFT collections, listings, sales data, and marketplace activities.
|
||||
|
||||
## Setup
|
||||
|
||||
1. Get your OpenSea API key from [OpenSea Developer Portal](https://docs.opensea.io/reference/api-keys)
|
||||
|
||||
2. Store the API key in Snowflake secrets under `_FSC_SYS/OPENSEA`
|
||||
|
||||
3. Deploy the OpenSea marketplace functions:
|
||||
```bash
|
||||
dbt run --models opensea__ opensea_utils__opensea_utils
|
||||
```
|
||||
|
||||
## Functions
|
||||
|
||||
### `opensea.get(path, query_args)`
|
||||
Make GET requests to OpenSea API endpoints.
|
||||
|
||||
### `opensea.post(path, body)`
|
||||
Make POST requests to OpenSea API endpoints.
|
||||
|
||||
## Examples
|
||||
|
||||
```sql
|
||||
-- Get NFT collection stats
|
||||
SELECT opensea.get('/api/v2/collections/boredapeyachtclub/stats', {});
|
||||
|
||||
-- Get NFT listings
|
||||
SELECT opensea.get('/api/v2/orders/ethereum/seaport/listings', {'limit': 20});
|
||||
|
||||
-- Get collection events
|
||||
SELECT opensea.get('/api/v2/events/collection/boredapeyachtclub', {'event_type': 'sale'});
|
||||
```
|
||||
|
||||
## API Documentation
|
||||
|
||||
- [OpenSea API Documentation](https://docs.opensea.io/reference/api-overview)
|
||||
39
macros/marketplace/playgrounds/README.md
Normal file
39
macros/marketplace/playgrounds/README.md
Normal file
@ -0,0 +1,39 @@
|
||||
# Playgrounds API Integration
|
||||
|
||||
Playgrounds provides gaming and entertainment data APIs with access to game statistics, player data, and gaming platform analytics.
|
||||
|
||||
## Setup
|
||||
|
||||
1. Get your Playgrounds API key from [Playgrounds Developer Portal](https://playgrounds.com/developers)
|
||||
|
||||
2. Store the API key in Snowflake secrets under `_FSC_SYS/PLAYGROUNDS`
|
||||
|
||||
3. Deploy the Playgrounds marketplace functions:
|
||||
```bash
|
||||
dbt run --models playgrounds__ playgrounds_utils__playgrounds_utils
|
||||
```
|
||||
|
||||
## Functions
|
||||
|
||||
### `playgrounds.get(path, query_args)`
|
||||
Make GET requests to Playgrounds API endpoints.
|
||||
|
||||
### `playgrounds.post(path, body)`
|
||||
Make POST requests to Playgrounds API endpoints.
|
||||
|
||||
## Examples
|
||||
|
||||
```sql
|
||||
-- Get game statistics
|
||||
SELECT playgrounds.get('/api/v1/games/stats', {'game_id': 'fortnite'});
|
||||
|
||||
-- Get player rankings
|
||||
SELECT playgrounds.get('/api/v1/leaderboards', {'game': 'valorant', 'region': 'na'});
|
||||
|
||||
-- Get tournament data
|
||||
SELECT playgrounds.get('/api/v1/tournaments', {'status': 'active', 'limit': 50});
|
||||
```
|
||||
|
||||
## API Documentation
|
||||
|
||||
- [Playgrounds API Documentation](https://docs.playgrounds.com/)
|
||||
44
macros/marketplace/quicknode/README.md
Normal file
44
macros/marketplace/quicknode/README.md
Normal file
@ -0,0 +1,44 @@
|
||||
# QuickNode API Integration
|
||||
|
||||
QuickNode provides high-performance blockchain infrastructure with RPC endpoints and enhanced APIs for Ethereum, Polygon, Solana, and other networks.
|
||||
|
||||
## Setup
|
||||
|
||||
1. Get your QuickNode endpoint and API key from [QuickNode Dashboard](https://dashboard.quicknode.com/)
|
||||
|
||||
2. Store the API key in Snowflake secrets under `_FSC_SYS/QUICKNODE`
|
||||
|
||||
3. Deploy the QuickNode marketplace functions:
|
||||
```bash
|
||||
dbt run --models quicknode__ quicknode_utils__quicknode_utils
|
||||
```
|
||||
|
||||
## Functions
|
||||
|
||||
### `quicknode.get(path, query_args)`
|
||||
Make GET requests to QuickNode API endpoints.
|
||||
|
||||
### `quicknode.post(path, body)`
|
||||
Make POST requests to QuickNode API endpoints.
|
||||
|
||||
## Examples
|
||||
|
||||
```sql
|
||||
-- Get latest block number
|
||||
SELECT quicknode.post('/rpc', {
|
||||
'jsonrpc': '2.0',
|
||||
'method': 'eth_blockNumber',
|
||||
'params': [],
|
||||
'id': 1
|
||||
});
|
||||
|
||||
-- Get NFT metadata
|
||||
SELECT quicknode.get('/nft/v1/ethereum/nft/0x.../1', {});
|
||||
|
||||
-- Get token transfers
|
||||
SELECT quicknode.get('/token/v1/ethereum/transfers', {'address': '0x...', 'limit': 100});
|
||||
```
|
||||
|
||||
## API Documentation
|
||||
|
||||
- [QuickNode API Documentation](https://www.quicknode.com/docs/)
|
||||
39
macros/marketplace/reservoir/README.md
Normal file
39
macros/marketplace/reservoir/README.md
Normal file
@ -0,0 +1,39 @@
|
||||
# Reservoir API Integration
|
||||
|
||||
Reservoir provides comprehensive NFT data infrastructure with APIs for accessing real-time NFT market data, collections, sales, and aggregated marketplace information.
|
||||
|
||||
## Setup
|
||||
|
||||
1. Get your Reservoir API key from [Reservoir Dashboard](https://reservoir.tools/dashboard)
|
||||
|
||||
2. Store the API key in Snowflake secrets under `_FSC_SYS/RESERVOIR`
|
||||
|
||||
3. Deploy the Reservoir marketplace functions:
|
||||
```bash
|
||||
dbt run --models reservoir__ reservoir_utils__reservoir_utils
|
||||
```
|
||||
|
||||
## Functions
|
||||
|
||||
### `reservoir.get(path, query_args)`
|
||||
Make GET requests to Reservoir API endpoints.
|
||||
|
||||
### `reservoir.post(path, body)`
|
||||
Make POST requests to Reservoir API endpoints.
|
||||
|
||||
## Examples
|
||||
|
||||
```sql
|
||||
-- Get collection floor prices
|
||||
SELECT reservoir.get('/collections/v7', {'id': '0x...', 'includeTopBid': 'true'});
|
||||
|
||||
-- Get recent sales
|
||||
SELECT reservoir.get('/sales/v6', {'collection': '0x...', 'limit': 100});
|
||||
|
||||
-- Get token details
|
||||
SELECT reservoir.get('/tokens/v7', {'collection': '0x...', 'tokenId': '1234'});
|
||||
```
|
||||
|
||||
## API Documentation
|
||||
|
||||
- [Reservoir API Documentation](https://docs.reservoir.tools/)
|
||||
514
macros/marketplace/slack/README.md
Normal file
514
macros/marketplace/slack/README.md
Normal file
@ -0,0 +1,514 @@
|
||||
# Slack Integration for Livequery
|
||||
|
||||
A straightforward Slack integration that lets you send exactly what you want to Slack. You construct the payload according to Slack's API spec, and Livequery delivers it.
|
||||
|
||||
## Prerequisites & Setup
|
||||
|
||||
### Option 1: Webhook Mode (Simpler, No Threading)
|
||||
|
||||
**When to use:** Simple notifications without threading support.
|
||||
|
||||
**Setup Steps:**
|
||||
1. Go to [Slack Apps](https://api.slack.com/apps) and create a new app
|
||||
2. Choose "From scratch" and select your workspace
|
||||
3. Go to "Incoming Webhooks" and toggle "Activate Incoming Webhooks" to On
|
||||
4. Click "Add New Webhook to Workspace"
|
||||
5. Select the channel and click "Allow"
|
||||
6. Copy the webhook URL (starts with `https://hooks.slack.com/services/...`)
|
||||
7. Use `slack.webhook_send(url, payload)` (see the sketch after the limitations list)
|
||||
|
||||
**Limitations:**
|
||||
- ❌ No threading support (cannot use `slack.post_reply()`)
|
||||
- ❌ Cannot send to different channels dynamically
|
||||
- ✅ Simple setup, no bot permissions needed
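
A minimal webhook sketch: the URL is a placeholder for the one copied in step 6, and the payload is any JSON that Slack's webhook spec accepts:

```sql
SELECT slack.webhook_send(
    'https://hooks.slack.com/services/T000/B000/XXXXXXXX',
    {
        'text': 'dbt run finished',
        'blocks': [
            {'type': 'section', 'text': {'type': 'mrkdwn', 'text': ':white_check_mark: *dbt run finished*'}}
        ]
    }
);
```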
|
||||
|
||||
### Option 2: Web API Mode (Full Features + Threading)
|
||||
|
||||
**When to use:** Need threading support, multiple channels, or advanced features.
|
||||
|
||||
**Setup Steps:**
|
||||
1. Go to [Slack Apps](https://api.slack.com/apps) and create a new app
|
||||
2. Choose "From scratch" and select your workspace
|
||||
3. Go to "OAuth & Permissions" in the sidebar
|
||||
4. Under "Scopes" → "Bot Token Scopes", add these permissions:
|
||||
- `chat:write` - Send messages
|
||||
- `channels:read` - Access public channel information
|
||||
- `groups:read` - Access private channel information (if needed)
|
||||
5. Click "Install to Workspace" at the top
|
||||
6. Click "Allow" to grant permissions
|
||||
7. Copy the "Bot User OAuth Token" (starts with `xoxb-...`)
|
||||
8. **Important:** Invite the bot to your channel:
|
||||
- Go to your Slack channel
|
||||
- Type `/invite @YourBotName` (replace with your bot's name)
|
||||
- Or go to channel settings → Integrations → Add apps → Select your bot
|
||||
9. Get the channel ID:
|
||||
- Right-click your channel name → "Copy Link"
|
||||
- Extract the ID from URL: `https://yourworkspace.slack.com/archives/C087GJQ1ZHQ` → `C087GJQ1ZHQ`
|
||||
10. Use `slack.post_message(token, channel, payload)` and `slack.post_reply()` for threading (see the sketch after the feature list)
|
||||
|
||||
**Features:**
|
||||
- ✅ Threading support with `slack.post_reply()`
|
||||
- ✅ Send to any channel the bot is invited to
|
||||
- ✅ More control and flexibility
|
||||
- ❌ Requires bot setup and channel invitations
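
A minimal Web API sketch. The `slack.post_reply()` argument order shown here (token, channel, thread timestamp, payload) is an assumption, as is the `response:data:ts` field; verify both against your deployment:

```sql
WITH parent AS (
    SELECT slack.post_message(
        'xoxb-your-bot-token',
        'C087GJQ1ZHQ',
        {'text': 'Deploy started :rocket:'}
    ) as response
)
SELECT slack.post_reply(
    'xoxb-your-bot-token',
    'C087GJQ1ZHQ',
    response:data:ts::STRING,  -- parent message timestamp (assumed response shape)
    {'text': 'Deploy finished :white_check_mark:'}
) as reply
FROM parent;
```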
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Add to dbt_project.yml (Recommended)
|
||||
|
||||
The easiest way to get Slack notifications for your entire dbt project:
|
||||
|
||||
```yaml
|
||||
# dbt_project.yml
|
||||
on-run-end:
|
||||
- "{{ slack_notify_on_run_end(results) }}"
|
||||
```
|
||||
|
||||
Then configure individual models with Slack settings (see Per-Model Configuration below).
|
||||
|
||||
**How it works:**
|
||||
- ✅ **Per-model notifications** - Each model controls its own Slack settings
|
||||
- ✅ **Custom message formats** - Models can define completely custom Slack payloads
|
||||
- ✅ **Flexible triggers** - Different models can notify on success, error, or both
|
||||
- ✅ **Variable substitution** - Use `{model_name}`, `{status}`, `{execution_time}` in custom messages
|
||||
- ✅ **Environment overrides** - Models can override global Slack webhook/channel settings
|
||||
- ✅ **Default fallback** - Models without config are ignored (no spam)
|
||||
|
||||
### 2. Per-Model Configuration

Configure Slack notifications individually for each model by adding `slack_config` to the model's `meta` section:

#### Basic Model Configuration

```sql
-- models/critical/dim_customers.sql
{{ config(
    meta={
        'slack_config': {
            'enabled': true,
            'notification_mode': 'error_only',
            'mention': '@here'
        }
    }
) }}

SELECT * FROM {{ ref('raw_customers') }}
```

`notification_mode` accepts `success_only`, `error_only`, or `both`; the optional `mention` notifies team members (see Mention Options below).
#### Custom Message Format

```sql
-- models/critical/fact_revenue.sql
{{ config(
    meta={
        'slack_config': {
            'enabled': true,
            'notification_mode': 'both',
            'channel': 'C1234567890',
            'custom_message': {
                'text': '💰 Revenue model {model_name} {status_emoji}',
                'username': 'Revenue Bot',
                'icon_emoji': ':money_with_wings:',
                'attachments': [
                    {
                        'color': 'good' if '{status}' == 'success' else 'danger',
                        'title': 'Critical Revenue Model Alert',
                        'fields': [
                            {'title': 'Model', 'value': '{model_name}', 'short': true},
                            {'title': 'Status', 'value': '{status_emoji} {status}', 'short': true},
                            {'title': 'Environment', 'value': '{environment}', 'short': true},
                            {'title': 'Duration', 'value': '{execution_time}s', 'short': true}
                        ],
                        'footer': 'Revenue Team • {repository}'
                    }
                ]
            }
        }
    }
) }}

SELECT * FROM {{ ref('raw_transactions') }}
```

Setting `channel` overrides the default channel for this model.
#### Different Slack Channels per Model

```sql
-- models/marketing/marketing_metrics.sql
{{ config(
    meta={
        'slack_config': {
            'enabled': true,
            'channel': '#marketing-alerts',
            'webhook_url': 'https://hooks.slack.com/services/MARKETING/WEBHOOK/URL',
            'notification_mode': 'error_only',
            'mention': '<@U1234567890>'
        }
    }
) }}
```

This model routes error alerts to the marketing team's own channel and webhook, and mentions a specific user by Slack ID.
#### Mention Options

You can notify specific people or groups using the `mention` parameter:

```sql
{{ config(
    meta={
        'slack_config': {
            'enabled': true,
            'mention': '@here'
        }
    }
) }}
```

Supported values for `mention`:

- `'@here'` - Notify all active members
- `'@channel'` - Notify all channel members
- `'<@U1234567890>'` - Mention a specific user by ID
- `'@username'` - Mention by username (if supported)
#### Available Variables for Custom Messages

Use these variables in your `custom_message` templates:

| Variable | Description | Example |
|----------|-------------|---------|
| `{model_name}` | Model name | `dim_customers` |
| `{status}` | Model status | `success`, `error` |
| `{status_emoji}` | Status emoji | `✅`, `❌` |
| `{environment}` | dbt target | `prod`, `dev` |
| `{repository}` | GitHub repository | `FlipsideCrypto/analytics` |
| `{execution_time}` | Execution time in seconds | `12.5` |

For example, a `text` template of `'{status_emoji} {model_name} finished in {execution_time}s'` renders as `✅ dim_customers finished in 12.5s`.
### 3. Example Notifications

With per-model configuration, each model sends its own notification using Slack's modern Block Kit layout with colored sidebars. Here are some examples:

**Default Model Notification (Success with Mention):**
```
🟢 ┌─────────────────────────────────────┐
   │ Hi @here, ✅ Model: dim_customers   │
   ├─────────────────────────────────────┤
   │ Success execution completed         │
   │                                     │
   │ Environment:        Execution Time: │
   │ prod                12.5s           │
   │                                     │
   │ Repository:                         │
   │ FlipsideCrypto/analytics            │
   │                                     │
   │ dbt via Livequery                   │
   └─────────────────────────────────────┘
```

**Custom Revenue Model Notification (Error):**
```
🔴 ┌─────────────────────────────────────┐
   │ 💰 Revenue model fact_revenue ❌    │
   ├─────────────────────────────────────┤
   │ Critical Revenue Model Alert        │
   │ Model: fact_revenue                 │
   │ Status: ❌ Error                    │
   │ Environment: prod                   │
   │ Duration: 45.2s                     │
   │                                     │
   │ Error Message:                      │
   │ Division by zero in line 23...      │
   │                                     │
   │ Revenue Team • FlipsideCrypto/analytics │
   └─────────────────────────────────────┘
```

**Marketing Model Notification (Success with User Mention):**
```
🟢 ┌─────────────────────────────────────┐
   │ Hi <@U1234567890>, ✅ Model: marketing_metrics │
   ├─────────────────────────────────────┤
   │ Success execution completed         │
   │                                     │
   │ Environment:        Execution Time: │
   │ prod                8.1s            │
   │                                     │
   │ Repository:                         │
   │ FlipsideCrypto/analytics            │
   │                                     │
   │ dbt via Livequery                   │
   └─────────────────────────────────────┘
```

*Note: The colored circles (🟢🔴) represent Slack's colored sidebar. The actual messages will display as rich Block Kit layouts with colored left borders in Slack.*
## Advanced Usage

### Manual Function Calls

For custom use cases, call functions directly:

#### Basic Webhook Message
```sql
SELECT slack.webhook_send(
    'https://hooks.slack.com/services/YOUR/WEBHOOK/URL',
    {
        'text': 'Hello from Livequery!',
        'username': 'Data Bot'
    }
);
```
#### Rich Web API Message with Blocks
```sql
SELECT slack.post_message(
    'xoxb-your-bot-token',
    'C087GJQ1ZHQ',
    {
        'text': 'Pipeline completed!',
        'blocks': [
            {
                'type': 'header',
                'text': {
                    'type': 'plain_text',
                    'text': ':white_check_mark: Pipeline Success'
                }
            },
            {
                'type': 'section',
                'fields': [
                    {'type': 'mrkdwn', 'text': '*Repository:*\nFlipsideCrypto/my-repo'},
                    {'type': 'mrkdwn', 'text': '*Duration:*\n15m 30s'}
                ]
            }
        ]
    }
);
```
#### Threading Example (Web API Only)
```sql
-- First send the main message
WITH main_message AS (
    SELECT slack.post_message(
        'xoxb-your-bot-token',
        'C087GJQ1ZHQ',
        {'text': 'Pipeline failed with 3 errors. Details in thread...'}
    ) as response
)
-- Then send threaded replies
SELECT slack.post_reply(
    'xoxb-your-bot-token',
    'C087GJQ1ZHQ',
    main_message.response:data:ts::STRING, -- Use the timestamp from the main message
    {'text': 'Error 1: Database connection timeout'}
) as thread_response
FROM main_message;
```
### Conditional Notifications

Add conditions to control when notifications are sent:

```yaml
# dbt_project.yml
on-run-end:
  # Only send notifications in production
  - "{% if target.name == 'prod' %}{{ slack_notify_on_run_end(results) }}{% endif %}"

  # Or use environment variable control
  - "{% if env_var('SEND_SLACK_NOTIFICATIONS', 'false') == 'true' %}{{ slack_notify_on_run_end(results) }}{% endif %}"
```
### Advanced: Custom Message Format

For full control over the message format, use the lower-level functions:

```yaml
on-run-end: |
  {% if execute %}
  {% set status = 'success' if results|selectattr('status', 'equalto', 'error')|list|length == 0 else 'failed' %}

  SELECT slack.webhook_send(
    '{{ env_var("SLACK_WEBHOOK_URL") }}',
    {
      'text': 'dbt run {{ status }}',
      'attachments': [
        {
          'color': '{{ "#36a64f" if status == "success" else "#ff0000" }}',
          'title': 'dbt {{ status|title }}',
          'fields': [
            {'title': 'Models', 'value': '{{ results|length }}', 'short': true},
            {'title': 'Failed', 'value': '{{ results|selectattr("status", "equalto", "error")|list|length }}', 'short': true}
          ]
        }
      ]
    }
  );
  {% endif %}
```
## Configuration Reference

### Global Environment Variables (Optional)

Models can override these global settings. Only set these if you want fallback defaults:

```bash
# Default Slack connection (models can override)
export SLACK_WEBHOOK_URL="https://hooks.slack.com/services/YOUR/WEBHOOK/URL"
# OR
export SLACK_BOT_TOKEN="xoxb-your-bot-token"
export SLACK_CHANNEL="C087GJQ1ZHQ"

# Optional global settings
export SLACK_BOT_USERNAME="dbt Bot"
export GITHUB_REPOSITORY="your-org/your-repo"
```
### Notification Modes

- **`error_only`** (default) - Only notify on failures
- **`success_only`** - Only notify on successful runs
- **`both`** - Notify on both success and failure

### Mention Options

The `mention` parameter allows you to notify specific users or groups:

- **`@here`** - Notify all active members in the channel
- **`@channel`** - Notify all members in the channel (use sparingly)
- **`<@U1234567890>`** - Mention a specific user by Slack user ID (recommended)
- **`<@U1234567890|username>`** - Mention a user with a display name
- **`@username`** - Mention by username (may not work in all workspaces)

**Note:** To find a user's Slack ID, right-click their profile → "Copy member ID"
## Functions Reference

### `slack.webhook_send(webhook_url, payload)`
Send messages via Slack Incoming Webhooks.

**Parameters:**
- `webhook_url` - Your Slack webhook URL
- `payload` - JSON object following [Slack webhook format](https://api.slack.com/messaging/webhooks)

### `slack.post_message(bot_token, channel, payload)`
Send messages via the Slack Web API (chat.postMessage).

**Parameters:**
- `bot_token` - Your Slack bot token (xoxb-...)
- `channel` - Channel ID (C...) or name (#channel)
- `payload` - JSON object following [Slack chat.postMessage format](https://api.slack.com/methods/chat.postMessage)

### `slack.post_reply(bot_token, channel, thread_ts, payload)`
Send threaded replies via the Slack Web API.

**Parameters:**
- `bot_token` - Your Slack bot token
- `channel` - Channel ID or name
- `thread_ts` - Parent message timestamp for threading
- `payload` - JSON object following Slack chat.postMessage format

### Validation Functions
- `slack_utils.validate_webhook_url(url)` - Check if a webhook URL is valid
- `slack_utils.validate_bot_token(token)` - Check if a bot token is valid
- `slack_utils.validate_channel(channel)` - Check if a channel format is valid
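A quick sketch of checking stored settings before sending (all literals are placeholders):

```sql
SELECT
    slack_utils.validate_webhook_url('https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX') AS webhook_ok,
    slack_utils.validate_bot_token('xoxb-1234567890-1234567890123-aBcDeFgHiJkLmNoPqRsTuVwX') AS token_ok,
    slack_utils.validate_channel('#general') AS channel_ok;
```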
## Testing Without Spamming Slack

### Built-in Tests
The integration includes comprehensive tests that use mock endpoints instead of real Slack channels:

- **httpbin.org** - Tests HTTP mechanics and payload formatting
- **Validation functions** - Test URL/token/channel format validation
- **Error scenarios** - Test authentication failures and invalid endpoints

### Manual Testing Options

#### 1. Test with httpbin.org (Recommended for Development)
```sql
-- Test webhook functionality without hitting Slack
SELECT slack.webhook_send(
    'https://httpbin.org/post',
    {'text': 'Test message', 'username': 'Test Bot'}
);

-- Verify the request was formatted correctly:
-- httpbin.org returns the request data in the response
```
#### 2. Test with webhook.site (Inspect Real Payloads)
```sql
-- Create a unique URL at https://webhook.site/ and use it
SELECT slack.webhook_send(
    'https://webhook.site/your-unique-id',
    {'text': 'Test message with full Slack formatting'}
);

-- View the captured request at webhook.site to see exactly what Slack would receive
```

#### 3. Test Workspace (Real Slack Testing)
Create a dedicated test workspace or use a private test channel:

```sql
-- Use environment variables to switch between test and prod
SELECT slack.webhook_send(
    '{{ env_var("SLACK_TEST_WEBHOOK_URL") }}', -- Test webhook
    {'text': 'Safe test in dedicated channel'}
);
```

#### 4. Conditional Testing
```yaml
# dbt_project.yml - Only send notifications in specific environments
on-run-end:
  - "{% if target.name == 'prod' %}{{ slack_notify_on_run_end(results) }}{% endif %}"
  - "{% if env_var('SLACK_TESTING_MODE', 'false') == 'true' %}{{ slack_notify_on_run_end(results) }}{% endif %}"
```
### Environment Variables for Testing
```bash
# Production Slack
export SLACK_WEBHOOK_URL="https://hooks.slack.com/services/YOUR/PROD/WEBHOOK"

# Testing alternatives
export SLACK_TEST_WEBHOOK_URL="https://webhook.site/your-unique-id"
export SLACK_HTTPBIN_TEST_URL="https://httpbin.org/post"
export SLACK_TESTING_MODE="true"
```

## How It Works

1. **You construct the payload** - Use Slack's official API documentation to build your JSON
2. **Livequery delivers it** - We handle the HTTP request to Slack
3. **Get the response** - Standard Slack API response with success/error info

## Slack API Documentation

- [Webhook Format](https://api.slack.com/messaging/webhooks) - For `webhook_send()`
- [chat.postMessage](https://api.slack.com/methods/chat.postMessage) - For `post_message()`
- [Block Kit](https://api.slack.com/block-kit) - For rich interactive messages
- [Message Formatting](https://api.slack.com/reference/surfaces/formatting) - Text formatting guide

That's it! No complex configurations, no templates to learn. Just Slack's API delivered through Livequery.
58
macros/marketplace/slack/messaging_udfs.yaml.sql
Normal file
@ -0,0 +1,58 @@
{% macro config_slack_messaging_udfs(schema_name = "slack", utils_schema_name = "slack_utils") -%}
{#
    This macro is used to generate API calls to Slack API endpoints
#}

{# Slack Webhook Messages #}
- name: {{ schema_name }}.webhook_send
  signature:
    - [WEBHOOK_URL, STRING, Slack webhook URL]
    - [PAYLOAD, OBJECT, Complete Slack message payload according to Slack API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a message to Slack via webhook [API docs: Webhooks](https://api.slack.com/messaging/webhooks)$$
  sql: |
    SELECT slack_utils.post_webhook(
      WEBHOOK_URL,
      PAYLOAD
    ) as response

{# Slack Web API Messages #}
- name: {{ schema_name }}.post_message
  signature:
    - [BOT_TOKEN, STRING, Slack bot token (xoxb-...)]
    - [CHANNEL, STRING, Slack channel ID or name]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a message to Slack via Web API [API docs: chat.postMessage](https://api.slack.com/methods/chat.postMessage)$$
  sql: |
    SELECT slack_utils.post_message(
      BOT_TOKEN,
      CHANNEL,
      PAYLOAD
    ) as response

- name: {{ schema_name }}.post_reply
  signature:
    - [BOT_TOKEN, STRING, Slack bot token (xoxb-...)]
    - [CHANNEL, STRING, Slack channel ID or name]
    - [THREAD_TS, STRING, Parent message timestamp for threading]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a threaded reply to Slack via Web API [API docs: chat.postMessage](https://api.slack.com/methods/chat.postMessage)$$
  sql: |
    SELECT slack_utils.post_reply(
      BOT_TOKEN,
      CHANNEL,
      THREAD_TS,
      PAYLOAD
    ) as response

{% endmacro %}
140
macros/marketplace/slack/utils_udfs.yaml.sql
Normal file
@ -0,0 +1,140 @@
{% macro config_slack_utils_udfs(schema_name = "slack_utils", utils_schema_name = "slack_utils") -%}
{#
    This macro is used to generate API calls to Slack API endpoints
#}

- name: {{ schema_name }}.post_webhook
  signature:
    - [WEBHOOK_URL, STRING, Slack webhook URL]
    - [PAYLOAD, OBJECT, Complete Slack message payload according to Slack API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a message to Slack via webhook. User provides complete payload according to Slack webhook API spec.$$
  sql: |
    SELECT CASE
      WHEN WEBHOOK_URL IS NULL OR WEBHOOK_URL = '' THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'webhook_url is required')
      WHEN NOT STARTSWITH(WEBHOOK_URL, 'https://hooks.slack.com/') THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'Invalid webhook URL format')
      WHEN PAYLOAD IS NULL THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'payload is required')
      ELSE
        live.udf_api(
          'POST',
          WEBHOOK_URL,
          OBJECT_CONSTRUCT('Content-Type', 'application/json'),
          PAYLOAD
        )
    END as response

- name: {{ schema_name }}.post_message
  signature:
    - [BOT_TOKEN, STRING, Slack bot token (xoxb-...)]
    - [CHANNEL, STRING, Slack channel ID or name]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a message to Slack via Web API chat.postMessage. User provides complete payload according to Slack API spec.$$
  sql: |
    SELECT CASE
      WHEN BOT_TOKEN IS NULL OR BOT_TOKEN = '' THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'bot_token is required')
      WHEN NOT STARTSWITH(BOT_TOKEN, 'xoxb-') THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'Invalid bot token format')
      WHEN CHANNEL IS NULL OR CHANNEL = '' THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'channel is required')
      WHEN PAYLOAD IS NULL THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'payload is required')
      ELSE
        live.udf_api(
          'POST',
          'https://slack.com/api/chat.postMessage',
          OBJECT_CONSTRUCT(
            'Authorization', 'Bearer ' || BOT_TOKEN,
            'Content-Type', 'application/json'
          ),
          OBJECT_INSERT(PAYLOAD, 'channel', CHANNEL)
        )
    END as response

- name: {{ schema_name }}.post_reply
  signature:
    - [BOT_TOKEN, STRING, Slack bot token (xoxb-...)]
    - [CHANNEL, STRING, Slack channel ID or name]
    - [THREAD_TS, STRING, Parent message timestamp for threading]
    - [PAYLOAD, OBJECT, Message payload according to Slack chat.postMessage API spec]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Send a threaded reply to Slack via Web API. User provides complete payload according to Slack API spec.$$
  sql: |
    SELECT CASE
      WHEN BOT_TOKEN IS NULL OR BOT_TOKEN = '' THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'bot_token is required')
      WHEN NOT STARTSWITH(BOT_TOKEN, 'xoxb-') THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'Invalid bot token format')
      WHEN CHANNEL IS NULL OR CHANNEL = '' THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'channel is required')
      WHEN THREAD_TS IS NULL OR THREAD_TS = '' THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'thread_ts is required')
      WHEN PAYLOAD IS NULL THEN
        OBJECT_CONSTRUCT('ok', false, 'error', 'payload is required')
      ELSE
        live.udf_api(
          'POST',
          'https://slack.com/api/chat.postMessage',
          OBJECT_CONSTRUCT(
            'Authorization', 'Bearer ' || BOT_TOKEN,
            'Content-Type', 'application/json'
          ),
          OBJECT_INSERT(
            OBJECT_INSERT(PAYLOAD, 'channel', CHANNEL),
            'thread_ts', THREAD_TS
          )
        )
    END as response

- name: {{ schema_name }}.validate_webhook_url
  signature:
    - [WEBHOOK_URL, STRING, Webhook URL to validate]
  return_type:
    - "BOOLEAN"
  options: |
    COMMENT = $$Validate if a string is a proper Slack webhook URL format.$$
  sql: |
    SELECT WEBHOOK_URL IS NOT NULL
      AND STARTSWITH(WEBHOOK_URL, 'https://hooks.slack.com/services/')
      AND LENGTH(WEBHOOK_URL) > 50

- name: {{ schema_name }}.validate_bot_token
  signature:
    - [BOT_TOKEN, STRING, Bot token to validate]
  return_type:
    - "BOOLEAN"
  options: |
    COMMENT = $$Validate if a string is a proper Slack bot token format.$$
  sql: |
    SELECT BOT_TOKEN IS NOT NULL
      AND STARTSWITH(BOT_TOKEN, 'xoxb-')
      AND LENGTH(BOT_TOKEN) > 20

- name: {{ schema_name }}.validate_channel
  signature:
    - [CHANNEL, STRING, Channel ID or name to validate]
  return_type:
    - "BOOLEAN"
  options: |
    COMMENT = $$Validate if a string is a proper Slack channel ID or name format.$$
  sql: |
    SELECT CHANNEL IS NOT NULL
      AND LENGTH(CHANNEL) > 0
      AND (
        STARTSWITH(CHANNEL, 'C') OR  -- Channel ID
        STARTSWITH(CHANNEL, 'D') OR  -- DM ID
        STARTSWITH(CHANNEL, 'G') OR  -- Group/Private channel ID
        STARTSWITH(CHANNEL, '#')     -- Channel name
      )

{% endmacro %}
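Because each generated UDF validates its inputs before calling `live.udf_api`, bad arguments short-circuit to an error object without making any HTTP request. A quick sketch against the deployed function:

```sql
-- No HTTP call is made; the CASE branch returns the error object directly
SELECT slack_utils.post_webhook(
    'https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX',
    NULL
) AS response;
-- response = {'ok': false, 'error': 'payload is required'}
```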
45
macros/marketplace/snapshot/README.md
Normal file
@ -0,0 +1,45 @@
# Snapshot API Integration

Snapshot is a decentralized voting platform that provides APIs for accessing DAO governance data, proposals, votes, and community participation metrics.

## Setup

1. Get your Snapshot API key from [Snapshot Hub](https://snapshot.org/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/SNAPSHOT`

3. Deploy the Snapshot marketplace functions:
```bash
dbt run --models snapshot__ snapshot_utils__snapshot_utils
```

## Functions

### `snapshot.get(path, query_args)`
Make GET requests to Snapshot API endpoints.

### `snapshot.post(path, body)`
Make POST requests to Snapshot GraphQL API endpoints.

## Examples

```sql
-- Get DAO spaces
SELECT snapshot.post('/graphql', {
    'query': 'query { spaces(first: 20, orderBy: "created", orderDirection: desc) { id name } }'
});

-- Get proposals for a space
SELECT snapshot.post('/graphql', {
    'query': 'query { proposals(first: 10, where: {space: "uniswap"}) { id title state } }'
});

-- Get votes for a proposal
SELECT snapshot.post('/graphql', {
    'query': 'query { votes(first: 100, where: {proposal: "proposal_id"}) { voter choice } }'
});
```

## API Documentation

- [Snapshot API Documentation](https://docs.snapshot.org/)
36
macros/marketplace/solscan/README.md
Normal file
@ -0,0 +1,36 @@
# Solscan API Integration

Solscan is a leading Solana blockchain explorer providing comprehensive APIs for accessing Solana transaction data, account information, and network statistics.

## Setup

1. Get your Solscan API key from [Solscan API Portal](https://pro-api.solscan.io/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/SOLSCAN`

3. Deploy the Solscan marketplace functions:
```bash
dbt run --models solscan__ solscan_utils__solscan_utils
```

## Functions

### `solscan.get(path, query_args)`
Make GET requests to Solscan API endpoints.

## Examples

```sql
-- Get account information
SELECT solscan.get('/account', {'address': 'account_address'});

-- Get transaction details
SELECT solscan.get('/transaction', {'signature': 'transaction_signature'});

-- Get token information
SELECT solscan.get('/token/meta', {'token': 'token_address'});
```

## API Documentation

- [Solscan API Documentation](https://docs.solscan.io/)
36
macros/marketplace/stakingrewards/README.md
Normal file
@ -0,0 +1,36 @@
# Staking Rewards API Integration

Staking Rewards provides comprehensive data on cryptocurrency staking opportunities, validator performance, and yield farming across multiple blockchain networks.

## Setup

1. Get your Staking Rewards API key from [Staking Rewards API Portal](https://stakingrewards.com/api)

2. Store the API key in Snowflake secrets under `_FSC_SYS/STAKINGREWARDS`

3. Deploy the Staking Rewards marketplace functions:
```bash
dbt run --models stakingrewards__ stakingrewards_utils__stakingrewards_utils
```

## Functions

### `stakingrewards.get(path, query_args)`
Make GET requests to Staking Rewards API endpoints.

## Examples

```sql
-- Get staking assets
SELECT stakingrewards.get('/assets', {'limit': 100});

-- Get validator information
SELECT stakingrewards.get('/validators', {'asset': 'ethereum', 'limit': 50});

-- Get staking rewards data
SELECT stakingrewards.get('/rewards', {'asset': 'solana', 'timeframe': '30d'});
```

## API Documentation

- [Staking Rewards API Documentation](https://docs.stakingrewards.com/)
39
macros/marketplace/strangelove/README.md
Normal file
@ -0,0 +1,39 @@
# Strangelove API Integration

Strangelove provides blockchain infrastructure and data services for Cosmos ecosystem blockchains, offering APIs for accessing cross-chain data and IBC information.

## Setup

1. Get your Strangelove API key from [Strangelove Ventures](https://strangelove.ventures/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/STRANGELOVE`

3. Deploy the Strangelove marketplace functions:
```bash
dbt run --models strangelove__ strangelove_utils__strangelove_utils
```

## Functions

### `strangelove.get(path, query_args)`
Make GET requests to Strangelove API endpoints.

### `strangelove.post(path, body)`
Make POST requests to Strangelove API endpoints.

## Examples

```sql
-- Get Cosmos network data
SELECT strangelove.get('/api/v1/chains', {});

-- Get IBC transfer data
SELECT strangelove.get('/api/v1/ibc/transfers', {'chain': 'cosmoshub', 'limit': 100});

-- Get validator information
SELECT strangelove.get('/api/v1/validators', {'chain': 'osmosis'});
```

## API Documentation

- [Strangelove API Documentation](https://docs.strangelove.ventures/)
45
macros/marketplace/subquery/README.md
Normal file
@ -0,0 +1,45 @@
# SubQuery API Integration

SubQuery provides decentralized data indexing infrastructure for Web3, offering APIs to access indexed blockchain data across multiple networks including Polkadot, Ethereum, and Cosmos.

## Setup

1. Get your SubQuery API key from [SubQuery Managed Service](https://managedservice.subquery.network/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/SUBQUERY`

3. Deploy the SubQuery marketplace functions:
```bash
dbt run --models subquery__ subquery_utils__subquery_utils
```

## Functions

### `subquery.get(path, query_args)`
Make GET requests to SubQuery API endpoints.

### `subquery.post(path, body)`
Make POST requests to SubQuery GraphQL API endpoints.

## Examples

```sql
-- Get indexed project data
SELECT subquery.post('/graphql', {
    'query': 'query { transfers(first: 10) { id from to value } }'
});

-- Get block information
SELECT subquery.post('/graphql', {
    'query': 'query { blocks(first: 5, orderBy: NUMBER_DESC) { id number timestamp } }'
});

-- Get account transactions
SELECT subquery.post('/graphql', {
    'query': 'query { accounts(filter: {id: {equalTo: "address"}}) { id transactions { nodes { id } } } }'
});
```

## API Documentation

- [SubQuery API Documentation](https://academy.subquery.network/)
36
macros/marketplace/topshot/README.md
Normal file
@ -0,0 +1,36 @@
# NBA Top Shot API Integration

NBA Top Shot is Dapper Labs' basketball NFT platform featuring officially licensed NBA highlights as digital collectible Moments.

## Setup

1. Get your NBA Top Shot API key from [Dapper Labs Developer Portal](https://developers.dapperlabs.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/TOPSHOT`

3. Deploy the Top Shot marketplace functions:
```bash
dbt run --models topshot__ topshot_utils__topshot_utils
```

## Functions

### `topshot.get(path, query_args)`
Make GET requests to NBA Top Shot API endpoints.

## Examples

```sql
-- Get Top Shot collections
SELECT topshot.get('/collections', {});

-- Get moment details
SELECT topshot.get('/moments/12345', {});

-- Get marketplace listings
SELECT topshot.get('/marketplace/listings', {'player': 'lebron-james', 'limit': 50});
```

## API Documentation

- [NBA Top Shot API Documentation](https://developers.dapperlabs.com/)
39
macros/marketplace/transpose/README.md
Normal file
@ -0,0 +1,39 @@
# Transpose API Integration

Transpose provides real-time blockchain data infrastructure with APIs for accessing NFT data, DeFi protocols, and on-chain analytics across multiple networks.

## Setup

1. Get your Transpose API key from [Transpose Dashboard](https://dashboard.transpose.io/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/TRANSPOSE`

3. Deploy the Transpose marketplace functions:
```bash
dbt run --models transpose__ transpose_utils__transpose_utils
```

## Functions

### `transpose.get(path, query_args)`
Make GET requests to Transpose API endpoints.

### `transpose.post(path, body)`
Make POST requests to Transpose API endpoints.

## Examples

```sql
-- Get NFT collection data
SELECT transpose.get('/v0/ethereum/collections/0x...', {});

-- Get account NFTs
SELECT transpose.get('/v0/ethereum/nfts/by-owner', {'owner_address': '0x...', 'limit': 100});

-- Get token transfers
SELECT transpose.get('/v0/ethereum/transfers', {'contract_address': '0x...', 'limit': 50});
```

## API Documentation

- [Transpose API Documentation](https://docs.transpose.io/)
36
macros/marketplace/zapper/README.md
Normal file
@ -0,0 +1,36 @@
# Zapper API Integration

Zapper provides DeFi portfolio tracking and analytics with APIs for accessing wallet balances, DeFi positions, transaction history, and yield farming opportunities.

## Setup

1. Get your Zapper API key from [Zapper API Portal](https://api.zapper.fi/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/ZAPPER`

3. Deploy the Zapper marketplace functions:
```bash
dbt run --models zapper__ zapper_utils__zapper_utils
```

## Functions

### `zapper.get(path, query_args)`
Make GET requests to Zapper API endpoints.

## Examples

```sql
-- Get wallet token balances
SELECT zapper.get('/v2/balances', {'addresses[]': '0x...', 'networks[]': 'ethereum'});

-- Get DeFi protocol positions
SELECT zapper.get('/v2/apps/tokens', {'groupId': 'uniswap-v2', 'addresses[]': '0x...'});

-- Get transaction history
SELECT zapper.get('/v2/transactions', {'address': '0x...', 'network': 'ethereum'});
```

## API Documentation

- [Zapper API Documentation](https://docs.zapper.fi/)
45
macros/marketplace/zettablock/README.md
Normal file
@ -0,0 +1,45 @@
# ZettaBlock API Integration

ZettaBlock provides real-time blockchain data infrastructure with GraphQL APIs for accessing multi-chain data, analytics, and custom data indexing.

## Setup

1. Get your ZettaBlock API key from [ZettaBlock Console](https://console.zettablock.com/)

2. Store the API key in Snowflake secrets under `_FSC_SYS/ZETTABLOCK`

3. Deploy the ZettaBlock marketplace functions:
```bash
dbt run --models zettablock__ zettablock_utils__zettablock_utils
```

## Functions

### `zettablock.get(path, query_args)`
Make GET requests to ZettaBlock API endpoints.

### `zettablock.post(path, body)`
Make POST requests to ZettaBlock GraphQL API endpoints.

## Examples

```sql
-- Get blockchain data via GraphQL
SELECT zettablock.post('/graphql', {
    'query': 'query { ethereum { transactions(first: 10) { hash value gasPrice } } }'
});

-- Get token information
SELECT zettablock.post('/graphql', {
    'query': 'query { tokens(network: "ethereum", first: 20) { address symbol name } }'
});

-- Get DeFi protocol data
SELECT zettablock.post('/graphql', {
    'query': 'query { defi { protocols(first: 10) { name tvl volume24h } } }'
});
```

## API Documentation

- [ZettaBlock API Documentation](https://docs.zettablock.com/)
31
macros/tests/udtfs.sql
Normal file
@ -0,0 +1,31 @@
{% macro base_test_udtf(model, udf, args, assertions) %}
{#
    Generates a test for a User-Defined Table Function (UDTF).
    Unlike scalar UDFs, UDTFs return a table of results.
#}
{%- set call -%}
SELECT * FROM TABLE({{ udf }}({{ args }}))
{%- endset -%}

WITH test AS
(
    SELECT
        '{{ udf }}' AS test_name
        ,[{{ args }}] as parameters
        ,t.*
    FROM TABLE({{ udf }}({{ args }})) t
)

{% for assertion in assertions %}
SELECT
    test_name,
    parameters,
    $${{ assertion }}$$ AS assertion,
    $${{ call }}$$ AS sql
FROM test
WHERE NOT {{ assertion }}
{%- if not loop.last %}
UNION ALL
{%- endif -%}
{%- endfor -%}
{% endmacro %}
@ -5,33 +5,283 @@ models:
      - name: workflows
        tests:
          - test_udf:
              name: test_github_actions__workflows_status_200
              name: test_github_actions__workflows_with_query
              args: >
                'FlipsideCrypto',
                'admin-models',
                {'per_page': 5}
              assertions:
                - result:workflows IS NOT NULL
                - result:total_count IS NOT NULL
          - test_udf:
              name: test_github_actions__workflows_simple
              args: >
                'FlipsideCrypto',
                'admin-models'
              assertions:
                - result:status_code = 200
                - result:error IS NULL
                - result:workflows IS NOT NULL
                - result:total_count IS NOT NULL

      - name: runs
        tests:
          - test_udf:
              name: test_github_actions__runs_status_200
              name: test_github_actions__runs_with_query
              args: >
                'FlipsideCrypto',
                'admin-models',
                {}
                {'per_page': 10, 'status': 'completed'}
              assertions:
                - result:status_code = 200
                - result:error IS NULL
                - result:workflow_runs IS NOT NULL
                - result:total_count IS NOT NULL
          - test_udf:
              name: test_github_actions__runs_simple
              args: >
                'FlipsideCrypto',
                'admin-models'
              assertions:
                - result:workflow_runs IS NOT NULL
                - result:total_count IS NOT NULL

      - name: workflow_runs
        tests:
          - test_udf:
              name: test_github_actions__workflow_runs_status_200
              name: test_github_actions__workflow_runs_with_query
              args: >
                'FlipsideCrypto',
                'admin-models',
                'dbt_run_dev_refresh.yml',
                {}
                {'per_page': 5}
              assertions:
                - result:status_code = 200
                - result:error IS NULL
                - result:workflow_runs IS NOT NULL
                - result:total_count IS NOT NULL
          - test_udf:
              name: test_github_actions__workflow_runs_simple
              args: >
                'FlipsideCrypto',
                'admin-models',
                'dbt_run_dev_refresh.yml'
              assertions:
                - result:workflow_runs IS NOT NULL
                - result:total_count IS NOT NULL

      - name: workflow_dispatches
        tests:
          - test_udf:
              name: test_github_actions__workflow_dispatches_with_body
              args: >
                'FlipsideCrypto',
                'admin-models',
                'test-workflow.yml',
                {'ref': 'main', 'inputs': {'debug': 'true'}}
              assertions:
                - result IS NOT NULL
          - test_udf:
              name: test_github_actions__workflow_dispatches_simple
              args: >
                'FlipsideCrypto',
                'admin-models',
                'test-workflow.yml'
              assertions:
                - result IS NOT NULL

      - name: workflow_enable
        tests:
          - test_udf:
              name: test_github_actions__workflow_enable
              args: >
                'FlipsideCrypto',
                'admin-models',
                'test-workflow.yml'
              assertions:
                - result IS NOT NULL

      - name: workflow_disable
        tests:
          - test_udf:
              name: test_github_actions__workflow_disable
              args: >
                'FlipsideCrypto',
                'admin-models',
                'test-workflow.yml'
              assertions:
                - result IS NOT NULL

      - name: workflow_run_logs
        tests:
          - test_udf:
              name: test_github_actions__workflow_run_logs
              args: >
                'FlipsideCrypto',
                'admin-models',
                '12345678'
              assertions:
                - result IS NULL

      - name: job_logs
        tests:
          - test_udf:
              name: test_github_actions__job_logs
              args: >
                'FlipsideCrypto',
                'admin-models',
                '87654321'
              assertions:
                - result IS NULL

      - name: workflow_run_jobs
        tests:
          - test_udf:
              name: test_github_actions__workflow_run_jobs_with_query
              args: >
                'FlipsideCrypto',
                'admin-models',
                '12345678',
                {'filter': 'latest'}
              assertions:
                - result:jobs IS NULL
                - result:total_count IS NULL
          - test_udf:
              name: test_github_actions__workflow_run_jobs_simple
              args: >
                'FlipsideCrypto',
                'admin-models',
                '12345678'
              assertions:
                - result:jobs IS NULL
                - result:total_count IS NULL

      # Table Function Tests
      - name: tf_workflows
        tests:
          - test_udtf:
              name: test_github_actions__tf_workflows_with_query
              args: >
                'FlipsideCrypto',
                'admin-models',
                {'per_page': 3}
              assertions:
                - row_count >= 0
          - test_udtf:
              name: test_github_actions__tf_workflows_simple
              args: >
                'FlipsideCrypto',
                'admin-models'
              assertions:
                - row_count >= 0

      - name: tf_runs
        tests:
          - test_udtf:
              name: test_github_actions__tf_runs_with_query
              args: >
                'FlipsideCrypto',
                'admin-models',
                {'per_page': 5, 'status': 'completed'}
              assertions:
                - row_count >= 0
          - test_udtf:
              name: test_github_actions__tf_runs_simple
              args: >
                'FlipsideCrypto',
                'admin-models'
              assertions:
                - row_count >= 0

      - name: tf_workflow_runs
        tests:
          - test_udtf:
              name: test_github_actions__tf_workflow_runs_with_query
              args: >
                'FlipsideCrypto',
                'admin-models',
                'dbt_run_dev_refresh.yml',
                {'per_page': 3}
              assertions:
                - row_count >= 0
          - test_udtf:
              name: test_github_actions__tf_workflow_runs_simple
              args: >
                'FlipsideCrypto',
                'admin-models',
                'dbt_run_dev_refresh.yml'
              assertions:
                - row_count >= 0

      - name: tf_workflow_run_jobs
        tests:
          - test_udtf:
              name: test_github_actions__tf_workflow_run_jobs_with_query
              args: >
                'FlipsideCrypto',
                'admin-models',
                '12345678',
                {'filter': 'latest'}
              assertions:
                - row_count >= 0
          - test_udtf:
              name: test_github_actions__tf_workflow_run_jobs_simple
              args: >
                'FlipsideCrypto',
                'admin-models',
                '12345678'
              assertions:
                - row_count >= 0

      - name: tf_failed_jobs_with_logs
        tests:
          - test_udtf:
              name: test_github_actions__tf_failed_jobs_with_logs
              args: >
                'FlipsideCrypto',
                'admin-models',
                '12345678'
              assertions:
                - row_count >= 0

      - name: tf_failure_analysis_with_ai
        tests:
          - test_udtf:
              name: test_github_actions__tf_failure_analysis_with_ai_cortex
              args: >
                'FlipsideCrypto',
                'admin-models',
                '12345678',
                true,
                'cortex',
                NULL
              assertions:
                - row_count >= 0
          - test_udtf:
              name: test_github_actions__tf_failure_analysis_with_ai_claude
              args: >
                'FlipsideCrypto',
                'admin-models',
                '12345678',
                true,
                'claude',
                NULL
              assertions:
                - row_count >= 0
          - test_udtf:
              name: test_github_actions__tf_failure_analysis_with_ai_groq
              args: >
                'FlipsideCrypto',
                'admin-models',
                '12345678',
                true,
                'groq',
                'test-api-key'
              assertions:
                - row_count >= 0
          - test_udtf:
              name: test_github_actions__tf_failure_analysis_with_ai_disabled
              args: >
                'FlipsideCrypto',
                'admin-models',
                '12345678',
                false,
                'cortex',
                NULL
              assertions:
                - row_count >= 0
@ -9,3 +9,58 @@ models:
              assertions:
                - result:status_code = 200
                - result:error IS NULL
                - result:data IS NOT NULL

      - name: headers
        tests:
          - test_udf:
              name: test_github_utils__headers_format
              assertions:
                - result IS NOT NULL
                - LENGTH(result) > 50
                - CONTAINS(result, 'Authorization')
                - CONTAINS(result, 'X-GitHub-Api-Version')
                - CONTAINS(result, 'Accept')

      - name: get
        tests:
          - test_udf:
              name: test_github_utils__get_user_repos
              args: >
                'user/FlipsideCrypto',
                {'type': 'public', 'per_page': 5}
              assertions:
                - result:status_code = 200
                - result:error IS NULL
                - result:data IS NOT NULL
          - test_udf:
              name: test_github_utils__get_repo_info
              args: >
                'repos/FlipsideCrypto/admin-models',
                {}
              assertions:
                - result:status_code = 200
                - result:data:name = 'admin-models'
                - result:data:owner:login = 'FlipsideCrypto'

      - name: post
        tests:
          - test_udf:
              name: test_github_utils__post_invalid_route
              args: >
                'invalid/test/route',
                {'test': 'data'}
              assertions:
                - result:status_code = 404
                - result IS NOT NULL

      - name: put
        tests:
          - test_udf:
              name: test_github_utils__put_invalid_route
              args: >
                'invalid/test/route',
                {'test': 'data'}
              assertions:
                - result:status_code = 404
                - result IS NOT NULL
6
models/deploy/marketplace/groq/groq__.sql
Normal file
@ -0,0 +1,6 @@
-- depends_on: {{ ref('live') }}
-- depends_on: {{ ref('groq_utils__groq_utils') }}
{%- set configs = [
    config_groq_chat_udfs,
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}
83
models/deploy/marketplace/groq/groq__.yml
Normal file
@ -0,0 +1,83 @@
version: 2
models:
  - name: groq__
    columns:
      - name: chat_completions
        tests:
          - test_udf:
              name: test_groq__chat_completions_simple
              args: >
                [{'role': 'user', 'content': 'Hello, how are you?'}],
                'test-api-key'
              assertions:
                - result:choices IS NOT NULL
                - result:model IS NOT NULL
          - test_udf:
              name: test_groq__chat_completions_with_model
              args: >
                'llama3-8b-8192',
                [{'role': 'user', 'content': 'Hello, how are you?'}],
                100,
                'test-api-key'
              assertions:
                - result:choices IS NOT NULL
                - result:model = 'llama3-8b-8192'
          - test_udf:
              name: test_groq__chat_completions_full_params
              args: >
                'llama3-8b-8192',
                [{'role': 'user', 'content': 'Hello, how are you?'}],
                100,
                0.5,
                0.95,
                0.0,
                0.0,
                'test-api-key'
              assertions:
                - result:choices IS NOT NULL
                - result:model = 'llama3-8b-8192'

      - name: quick_chat
        tests:
          - test_udf:
              name: test_groq__quick_chat_single_message
              args: >
                'Hello, how are you?',
                'test-api-key'
              assertions:
                - result:choices IS NOT NULL
          - test_udf:
              name: test_groq__quick_chat_with_system
              args: >
                'You are a helpful assistant.',
                'Hello, how are you?',
                'test-api-key'
              assertions:
                - result:choices IS NOT NULL

      - name: extract_response_text
        tests:
          - test_udf:
              name: test_groq__extract_response_text
              args: >
                {'choices': [{'message': {'content': 'Hello there!'}}]}
              assertions:
                - result = 'Hello there!'
          - test_udf:
              name: test_groq__extract_response_text_error
              args: >
                {'error': {'message': 'API Error occurred'}}
              assertions:
                - result = 'API Error occurred'

      - name: post
        tests:
          - test_udf:
              name: test_groq_utils__post_health_check
              args: >
                '/openai/v1/models',
                {},
                'test-api-key'
              assertions:
                - result:data IS NOT NULL
5
models/deploy/marketplace/groq/groq_utils__groq_utils.sql
Normal file
@ -0,0 +1,5 @@
-- depends_on: {{ ref('live') }}
{%- set configs = [
    config_groq_utils_udfs,
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}
26
models/deploy/marketplace/groq/groq_utils__groq_utils.yml
Normal file
@ -0,0 +1,26 @@
version: 2
models:
  - name: groq_utils__groq_utils
    columns:
      - name: post
        tests:
          - test_udf:
              name: test_groq_utils__post_models_endpoint
              args: >
                '/openai/v1/models',
                {},
                'test-api-key'
              assertions:
                - result:data IS NOT NULL
          - test_udf:
              name: test_groq_utils__post_chat_endpoint
              args: >
                '/openai/v1/chat/completions',
                {
                  'model': 'llama3-8b-8192',
                  'messages': [{'role': 'user', 'content': 'Hello'}],
                  'max_tokens': 10
                },
                'test-api-key'
              assertions:
                - result:choices IS NOT NULL
6
models/deploy/marketplace/slack/slack__.sql
Normal file
@ -0,0 +1,6 @@
-- depends_on: {{ ref('live') }}
-- depends_on: {{ ref('slack_utils__slack_utils') }}
{%- set configs = [
    config_slack_messaging_udfs,
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}
133
models/deploy/marketplace/slack/slack__.yml
Normal file
@ -0,0 +1,133 @@
version: 2
models:
  - name: slack__
    columns:
      - name: webhook_send
        tests:
          - test_udf:
              name: test_slack__webhook_send_simple
              args: >
                'https://httpbin.org/post',
                {'text': 'Hello from Livequery!'}
              assertions:
                - result:status_code = 200
                - result:data.json.text = 'Hello from Livequery!'
                - result IS NOT NULL
          - test_udf:
              name: test_slack__webhook_send_rich
              args: >
                'https://httpbin.org/post',
                {
                  'text': 'Pipeline completed!',
                  'username': 'dbt Bot',
                  'icon_emoji': ':bar_chart:',
                  'attachments': [
                    {
                      'color': '#36a64f',
                      'title': 'Success',
                      'fields': [
                        {'title': 'Models', 'value': '5', 'short': true},
                        {'title': 'Failed', 'value': '0', 'short': true}
                      ]
                    }
                  ]
                }
              assertions:
                - result:status_code = 200
                - result:data.json.text = 'Pipeline completed!'
                - result:data.json.username = 'dbt Bot'
                - result IS NOT NULL

      - name: post_message
        tests:
          - test_udf:
              name: test_slack__post_message_simple
              args: >
                'fake-test-token',
                'https://httpbin.org/post',
                {'text': 'Hello from Livequery!'}
              assertions:
                - result:status_code = 200
                - result:data.json.text = 'Hello from Livequery!'
                - result IS NOT NULL
          - test_udf:
              name: test_slack__post_message_blocks
              args: >
                'fake-test-token',
                'https://httpbin.org/post',
                {
                  'text': 'Pipeline completed!',
                  'blocks': [
                    {
                      'type': 'header',
                      'text': {
                        'type': 'plain_text',
                        'text': ':white_check_mark: Pipeline Success'
                      }
                    },
                    {
                      'type': 'section',
                      'fields': [
                        {'type': 'mrkdwn', 'text': '*Repository:*\nFlipsideCrypto/my-repo'},
                        {'type': 'mrkdwn', 'text': '*Duration:*\n15m 30s'}
                      ]
                    }
                  ]
                }
              assertions:
                - result:status_code = 200
                - result:data.json.text = 'Pipeline completed!'
                - result IS NOT NULL

      - name: post_reply
        tests:
          - test_udf:
              name: test_slack__post_reply_simple
              args: >
                'fake-test-token',
                'https://httpbin.org/post',
                '1234567890.123456',
                {'text': 'Thread reply from Livequery!'}
              assertions:
                - result:status_code = 200
                - result:data.json.text = 'Thread reply from Livequery!'
                - result IS NOT NULL

      - name: webhook_send
        tests:
          - test_udf:
              name: test_slack__webhook_send_complex_payload
              args: >
                'https://httpbin.org/post',
                {
                  'text': 'Complex test message',
                  'username': 'Test Bot',
                  'icon_emoji': ':test_tube:',
                  'blocks': [
                    {
                      'type': 'header',
                      'text': {
                        'type': 'plain_text',
                        'text': '🧪 Test Results'
                      }
                    },
                    {
                      'type': 'section',
                      'text': {
                        'type': 'mrkdwn',
                        'text': '*All tests passed!* ✅'
                      }
                    }
                  ],
                  'attachments': [
                    {
                      'color': '#36a64f',
                      'blocks': []
                    }
                  ]
                }
              assertions:
                - result:status_code = 200
                - result:data.json.text = 'Complex test message'
                - result:data.json.username = 'Test Bot'
                - result IS NOT NULL
5
models/deploy/marketplace/slack/slack_utils__slack_utils.sql
Normal file
@ -0,0 +1,5 @@
-- depends_on: {{ ref('live') }}
{%- set configs = [
    config_slack_utils_udfs,
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}
158
models/deploy/marketplace/slack/slack_utils__slack_utils.yml
Normal file
@ -0,0 +1,158 @@
version: 2
models:
  - name: slack_utils__slack_utils
    columns:
      - name: post_webhook
        tests:
          - test_udf:
              name: test_slack_utils__post_webhook_httpbin
              args: >
                'https://httpbin.org/post',
                {'text': 'Test message from Livequery'}
              assertions:
                - result:status_code = 200
                - result:data.json.text = 'Test message from Livequery'
                - result IS NOT NULL
          - test_udf:
              name: test_slack_utils__post_webhook_invalid_url
              args: >
                'https://httpbin.org/status/404',
                {'text': 'Test message'}
              assertions:
                - result:status_code = 404
                - result IS NOT NULL
          - test_udf:
              name: test_slack_utils__post_webhook_null_url
              args: >
                NULL,
                {'text': 'Test message'}
              assertions:
                - result:ok = false
                - result:error = 'webhook_url is required'
          - test_udf:
              name: test_slack_utils__post_webhook_invalid_format
              args: >
                'https://invalid-url.com/webhook',
                {'text': 'Test message'}
              assertions:
                - result:ok = false
                - result:error = 'Invalid webhook URL format'
          - test_udf:
              name: test_slack_utils__post_webhook_null_payload
              args: >
                'https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX',
                NULL
              assertions:
                - result:ok = false
                - result:error = 'payload is required'

      - name: post_message
        tests:
          - test_udf:
              name: test_slack_utils__post_message_httpbin
              args: >
                'fake-test-token',
                'https://httpbin.org/post',
                {'text': 'Test message from Livequery'}
              assertions:
                - result:status_code = 200
                - result:data.json.text = 'Test message from Livequery'
                - result IS NOT NULL
          - test_udf:
              name: test_slack_utils__post_message_auth_error
              args: >
                'invalid-token',
                'https://httpbin.org/status/401',
                {'text': 'Test message'}
              assertions:
                - result:status_code = 401
                - result IS NOT NULL

      - name: post_reply
        tests:
          - test_udf:
              name: test_slack_utils__post_reply_httpbin
              args: >
                'fake-test-token',
                'https://httpbin.org/post',
                '1234567890.123456',
                {'text': 'Test reply from Livequery'}
              assertions:
                - result:status_code = 200
                - result:data.json.text = 'Test reply from Livequery'
                - result IS NOT NULL

      - name: validate_webhook_url
        tests:
          - test_udf:
              name: test_slack_utils__validate_webhook_url_valid
              args: >
                'https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX'
              assertions:
                - result = true
          - test_udf:
              name: test_slack_utils__validate_webhook_url_invalid
              args: >
                'https://invalid-url.com/webhook'
              assertions:
                - result = false

      - name: validate_bot_token
        tests:
          - test_udf:
              name: test_slack_utils__validate_bot_token_valid
              args: >
                'fake-1234567890-1234567890123-aBcDeFgHiJkLmNoPqRsTuVwX'
              assertions:
                - result = true
          - test_udf:
              name: test_slack_utils__validate_bot_token_invalid
              args: >
                'invalid-token'
              assertions:
                - result = false

      - name: validate_channel
        tests:
          - test_udf:
              name: test_slack_utils__validate_channel_id
              args: >
                'C1234567890'
              assertions:
                - result = true
          - test_udf:
              name: test_slack_utils__validate_channel_name
              args: >
                '#general'
              assertions:
                - result = true
          - test_udf:
              name: test_slack_utils__validate_channel_dm
              args: >
                'D1234567890'
              assertions:
                - result = true
          - test_udf:
              name: test_slack_utils__validate_channel_group
              args: >
                'G1234567890'
              assertions:
                - result = true
          - test_udf:
              name: test_slack_utils__validate_channel_invalid
              args: >
                'invalid-channel'
              assertions:
                - result = false
          - test_udf:
              name: test_slack_utils__validate_channel_null
              args: >
                NULL
              assertions:
                - result = false
          - test_udf:
              name: test_slack_utils__validate_channel_empty
              args: >
                ''
              assertions:
                - result = false
28
tests/generic/test_udtf.sql
Normal file
@ -0,0 +1,28 @@
{% test test_udtf(model, column_name, args, assertions) %}
{%- set schema = model | replace("__dbt__cte__", "") -%}
{%- set schema = schema.split("__") | first -%}
{%- set udf = schema ~ "." ~ column_name -%}

WITH base_test_data AS
(
    SELECT
        '{{ udf }}' AS test_name
        ,[{{ args }}] as parameters
        ,COUNT(*) OVER () AS row_count
    FROM TABLE({{ udf }}({{ args }})) t
    LIMIT 1
)

{% for assertion in assertions %}
SELECT
    test_name,
    parameters,
    $${{ assertion }}$$ AS assertion,
    $$SELECT * FROM TABLE({{ udf }}({{ args }}))$$ AS sql
FROM base_test_data
WHERE NOT ({{ assertion }})
{% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}
{% endtest %}
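Usage mirrors the `test_udf` pattern shown earlier in this commit; a minimal schema-yml sketch, taken from the github_actions tests above:

```yaml
models:
  - name: github_actions__
    columns:
      - name: tf_workflows
        tests:
          - test_udtf:
              name: test_github_actions__tf_workflows_simple
              args: >
                'FlipsideCrypto',
                'admin-models'
              assertions:
                - row_count >= 0
```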