Mirror of https://github.com/FlipsideCrypto/polygon-models.git (synced 2026-02-06)
Polygon Overhaul (#233)
* Polygon Overhaul
  - Update DBT models to match streamline optimizations
  - Standardization
  - Added more recent DBT optimizations
* Add livequery infra
* Add PROD integration
parent 1ba3b65278
commit fc211f5fbd
@ -6,7 +6,7 @@ on:
  schedule:
    # Runs "at 3:00 UTC AM" (see https://crontab.guru)
    - cron: '0 3 * * *'

env:
  DBT_PROFILES_DIR: ./

@ -25,7 +25,7 @@ concurrency:
jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
@ -25,6 +25,13 @@ clean-targets: # directories to be removed by `dbt clean`
  - "dbt_packages"

models:
  polygon_models:
    deploy:
      +materialized: ephemeral
      core:
        +tags: core
      marketplace:
        +tags: marketplace
    +copy_grants: true

tests:

@ -64,4 +71,27 @@ vars:
  WAIT: 0
  OBSERV_FULL_TEST: False
  BALANCES_START: 0
  BALANCES_END: 15000000
  DROP_UDFS_AND_SPS: False
  STUDIO_TEST_USER_ID: '{{ env_var("STUDIO_TEST_USER_ID", "98d15c30-9fa5-43cd-9c69-3d4c0bb269f5") }}'
  ENABLE_SNOWFLAKE_SECRETS: '{{ env_var("ENABLE_SNOWFLAKE_SECRETS", "") }}'

  API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] }}'
  EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] }}'
  ROLES: '{{ var("config")[target.name]["ROLES"] }}'

  config:
    # The keys correspond to dbt profiles and are case sensitive
    dev:
      API_INTEGRATION: AWS_POLYGON_API_DEV
      EXTERNAL_FUNCTION_URI: rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/
      ROLES:
        - INTERNAL_DEV
    prod:
      API_INTEGRATION: AWS_POLYGON_API
      EXTERNAL_FUNCTION_URI: p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/
      ROLES:
        - VELOCITY_INTERNAL
        - VELOCITY_ETHEREUM
        - INTERNAL_DEV
        - BI_ANALYTICS_READER
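A quick illustration of the indirection (a sketch; the values simply restate the config block above): running dbt with the prod target makes target.name resolve to 'prod', so:

    {{ var("config")[target.name]["API_INTEGRATION"] }}       -> AWS_POLYGON_API
    {{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] }} -> p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/
    {{ var("config")[target.name]["ROLES"] }}                 -> [VELOCITY_INTERNAL, VELOCITY_ETHEREUM, INTERNAL_DEV, BI_ANALYTICS_READER]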
macros/core/_live.yaml.sql (new file, +18 lines)
@ -0,0 +1,18 @@
{% macro config_core__live(schema="_live") %}

- name: {{ schema }}.udf_api
  signature:
    - [method, STRING]
    - [url, STRING]
    - [headers, OBJECT]
    - [DATA, OBJECT]
    - [user_id, STRING]
    - [SECRET, STRING]
  return_type: VARIANT
  func_type: EXTERNAL
  api_integration: '{{ var("API_INTEGRATION") }}'
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
  sql: udf_api
{% endmacro %}
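As a sketch of what this config compiles to (assuming the prod integration; the actual DDL is rendered by create_sql_function in macros/streamline/manage_udfs.sql further down):

    CREATE OR REPLACE EXTERNAL FUNCTION _live.udf_api(
        method STRING, url STRING, headers OBJECT, DATA OBJECT, user_id STRING, SECRET STRING
    )
    COPY GRANTS
    RETURNS VARIANT
    NOT NULL
    RETURNS NULL ON NULL INPUT
    api_integration = AWS_POLYGON_API
    AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/udf_api';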
macros/core/_utils.yaml.sql (new file, +63 lines)
@ -0,0 +1,63 @@
{% macro config_core__utils(schema="_utils") %}

- name: {{ schema }}.udf_introspect
  signature:
    - [echo, STRING]
  func_type: SECURE EXTERNAL
  return_type: TEXT
  api_integration: '{{ var("API_INTEGRATION") }}'
  sql: introspect

- name: {{ schema }}.udf_user_tier
  signature: []
  func_type: SECURE
  return_type: TEXT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    MEMOIZABLE
  sql: |
    SELECT
      COALESCE(PARSE_JSON(GETVARIABLE('LIVEQUERY_CONTEXT')):userTier::STRING, 'community')

- name: {{ schema }}.udf_provider
  signature: []
  func_type: SECURE
  return_type: TEXT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    MEMOIZABLE
  sql: |
    SELECT
      COALESCE(PARSE_JSON(GETVARIABLE('LIVEQUERY_CONTEXT')):provider::STRING, 'quicknode')

- name: {{ schema }}.udf_whoami
  signature: []
  func_type: SECURE
  return_type: TEXT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    MEMOIZABLE
  sql: |
    SELECT
      COALESCE(PARSE_JSON(GETVARIABLE('LIVEQUERY_CONTEXT')):userId::STRING, CURRENT_USER())

- name: {{ schema }}.udf_register_secret
  signature:
    - [request_id, STRING]
    - [user_id, STRING]
    - [key, STRING]
  return_type: OBJECT
  func_type: SECURE EXTERNAL
  api_integration: '{{ var("API_INTEGRATION") }}'
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
  sql: secret/register
{% endmacro %}
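Each of these context functions degrades gracefully when no LIVEQUERY_CONTEXT session variable is set, so they can be probed directly; an illustrative check:

    SELECT
        _utils.udf_whoami()    AS user_id,   -- falls back to CURRENT_USER()
        _utils.udf_user_tier() AS user_tier, -- falls back to 'community'
        _utils.udf_provider()  AS provider;  -- falls back to 'quicknode'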
macros/core/functions.py.sql (new file, +123 lines)
@ -0,0 +1,123 @@
{% macro python_hex_to_int() %}
def hex_to_int(hex) -> str:
    """
    Converts hex (of any size) to int (as a string). Snowflake and JavaScript can only handle
    up to 64-bit integers natively (Snowflake NUMBER tops out at 38 digits of precision).
    hex_to_int('200000000000000000000000000000211');
    >> 680564733841876926926749214863536423441
    hex_to_int('0x200000000000000000000000000000211');
    >> 680564733841876926926749214863536423441
    hex_to_int(NULL);
    >> NULL
    """
    try:
        return str(int(hex, 16)) if hex and hex != "0x" else None
    except:
        return None
{% endmacro %}


{% macro python_udf_hex_to_int_with_encoding() %}
def hex_to_int(encoding, hex) -> str:
    """
    Converts hex (of any size) to int (as a string), optionally as signed two's complement
    ('s2c'). Snowflake and JavaScript can only handle up to 64-bit integers natively
    (Snowflake NUMBER tops out at 38 digits of precision).
    hex_to_int('hex', '200000000000000000000000000000211');
    >> 680564733841876926926749214863536423441
    hex_to_int('hex', '0x200000000000000000000000000000211');
    >> 680564733841876926926749214863536423441
    hex_to_int('hex', NULL);
    >> NULL
    hex_to_int('s2c', 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffe5b83acf');
    >> -440911153
    """
    try:
        if not hex:
            return None
        if encoding.lower() == 's2c':
            if hex[0:2].lower() != '0x':
                hex = f'0x{hex}'

            bits = len(hex[2:]) * 4
            value = int(hex, 0)
            if value & (1 << (bits - 1)):
                value -= 1 << bits
            return str(value)
        else:
            return str(int(hex, 16))
    except:
        return None
{% endmacro %}
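The 's2c' branch is standard two's complement: for n hex digits, bits = 4n, and if the top bit is set the value is reduced by 2^bits. A tiny worked case under that rule:

    hex_to_int('s2c', 'ff')   # bits = 8; int('0xff', 0) = 255; top bit set, so 255 - 256 = -1 -> returns '-1'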

{% macro create_udf_keccak256() %}
from Crypto.Hash import keccak

def udf_encode(event_name):
    keccak_hash = keccak.new(digest_bits=256)
    keccak_hash.update(event_name.encode('utf-8'))
    return '0x' + keccak_hash.hexdigest()
{% endmacro %}


{% macro create_udf_evm_text_signature() %}

def get_simplified_signature(abi):
    def generate_signature(inputs):
        signature_parts = []
        for input_data in inputs:
            if 'components' in input_data:
                component_signature_parts = []
                components = input_data['components']
                component_signature_parts.extend(generate_signature(components))
                component_signature_parts[-1] = component_signature_parts[-1].rstrip(",")
                if input_data['type'].endswith('[]'):
                    signature_parts.append("(" + "".join(component_signature_parts) + ")[],")
                else:
                    signature_parts.append("(" + "".join(component_signature_parts) + "),")
            else:
                signature_parts.append(input_data['type'].replace('enum ', '').replace(' payable', '') + ",")
        return signature_parts

    signature_parts = [abi['name'] + "("]
    signature_parts.extend(generate_signature(abi['inputs']))
    signature_parts[-1] = signature_parts[-1].rstrip(",") + ")"
    return "".join(signature_parts)
{% endmacro %}


{% macro python_object_to_url_query_string() %}
from urllib.parse import urlencode

def object_to_url_query_string(query, doseq=False):
    {# return type(query) #}
    if isinstance(query, dict):
        return urlencode(query, doseq)
    return urlencode([tuple(i) for i in query], doseq)

{% endmacro %}


{% macro python_udf_evm_transform_log(schema) %}
from copy import deepcopy

def transform_event(event: dict):
    new_event = deepcopy(event)
    if new_event.get("components"):
        components = new_event.get("components")
        for iy, y in enumerate(new_event["value"]):
            for i, c in enumerate(components):
                y[i] = {"value": y[i], **c}
            new_event["value"][iy] = {z["name"]: z["value"] for z in y}
        return new_event
    else:
        return event


def transform(events: list):
    try:
        results = [
            transform_event(event) if event["decoded"] else event
            for event in events["data"]
        ]
        events["data"] = results
        return events
    except:
        return events

{% endmacro %}

macros/core/functions.sql (new file, +15 lines)
@ -0,0 +1,15 @@
{% macro sql_udf_json_rpc_call(use_default_id=True) %}
SELECT IFF(method IS NULL or params IS NULL,
    NULL,
    {
        'jsonrpc': '2.0',
        'method': method,
        'params': params
        {% if use_default_id %}
        , 'id': hash(method, params)::string
        {% else %}
        , 'id': id
        {% endif %}
    }
)
{% endmacro %}
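When use_default_id is true, the id is derived from hash(method, params), so a call such as utils.udf_json_rpc_call('eth_blockNumber', []) (declared below in macros/core/utils.yaml.sql) yields an object shaped like this sketch:

    {'jsonrpc': '2.0', 'method': 'eth_blockNumber', 'params': [], 'id': '<hash(method, params)>'}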
macros/core/live.yaml.sql (new file, +137 lines)
@ -0,0 +1,137 @@
{% macro config_core_live(schema="live") %}

- name: {{ schema }}.udf_api
  signature:
    - [method, STRING]
    - [url, STRING]
    - [headers, OBJECT]
    - [data, OBJECT]
    - [secret_name, STRING]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        method,
        url,
        headers,
        data,
        _utils.UDF_WHOAMI(),
        secret_name
      )
- name: {{ schema }}.udf_api
  signature:
    - [method, STRING]
    - [url, STRING]
    - [headers, OBJECT]
    - [data, OBJECT]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        method,
        url,
        headers,
        data,
        _utils.UDF_WHOAMI(),
        ''
      )
- name: {{ schema }}.udf_api
  signature:
    - [url, STRING]
    - [data, OBJECT]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        'POST',
        url,
        {'Content-Type': 'application/json'},
        data,
        _utils.UDF_WHOAMI(),
        ''
      )
- name: {{ schema }}.udf_api
  signature:
    - [url, STRING]
    - [data, OBJECT]
    - [secret_name, STRING]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        'POST',
        url,
        {'Content-Type': 'application/json'},
        data,
        _utils.UDF_WHOAMI(),
        secret_name
      )
- name: {{ schema }}.udf_api
  signature:
    - [url, STRING]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        'GET',
        url,
        {},
        {},
        _utils.UDF_WHOAMI(),
        ''
      )
- name: {{ schema }}.udf_api
  signature:
    - [url, STRING]
    - [secret_name, STRING]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        'GET',
        url,
        {},
        {},
        _utils.UDF_WHOAMI(),
        secret_name
      )

- name: {{ schema }}.udf_rpc
  signature:
    - [blockchain, STRING]
    - [network, STRING]
    - [method, STRING]
    - [parameters, VARIANT]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
    COMMENT = $$Executes a JSON RPC call on a blockchain.$$
  sql: |
    {{ sql_live_rpc_call("method", "parameters", "blockchain", "network") | indent(4) -}}
{% endmacro %}
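All of these overloads funnel into _live.UDF_API with progressively more defaults pinned (method, headers, secret name, caller identity). An illustrative call with the two-argument POST form (node URL hypothetical):

    SELECT live.udf_api(
        'https://polygon-rpc.example.com',
        utils.udf_json_rpc_call('eth_blockNumber', [])
    );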
macros/core/secrets.yaml.sql (new file, +87 lines)
@ -0,0 +1,87 @@
{% macro config_core_secrets(schema="secrets") %}

- name: {{ schema }}.udf_register_secret
  signature:
    - [request_id, STRING]
    - [key, STRING]
  func_type: SECURE
  return_type: OBJECT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    COMMENT = $$ Registers a secret with the given request ID and key. $$
  sql: |
    SELECT
      _utils.UDF_REGISTER_SECRET(REQUEST_ID, _utils.UDF_WHOAMI(), KEY)

- name: {{ schema }}.udf_get_secret
  signature:
    - [name, STRING]
  func_type: SECURE
  return_type: OBJECT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    COMMENT = $$ Returns the secret value for the given secret name. $$
  sql: |
    SELECT
      live.udf_api(
        CONCAT_WS('/', {{ construct_api_route("secret") }}, _utils.UDF_WHOAMI(), NAME)
      ):data::OBJECT

- name: {{ schema }}.udf_get_secrets
  signature: []
  func_type: SECURE
  return_type: OBJECT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    COMMENT = $$ Returns all secrets for the current user. $$
  sql: |
    SELECT
      {{ schema }}.udf_get_secret('')

- name: {{ schema }}.udf_create_secret
  signature:
    - [name, STRING]
    - [secret, OBJECT]
  func_type: SECURE
  return_type: [INTEGER, the HTTP status code - 200 indicates success]
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    COMMENT = $$ Creates a new secret with the given name and value. $$
  sql: |
    SELECT
      live.udf_api(
        CONCAT_WS('/', {{ construct_api_route("secret") }}, _utils.UDF_WHOAMI(), NAME),
        SECRET
      ):status_code::INTEGER

- name: {{ schema }}.udf_delete_secret
  signature:
    - [name, STRING]
  func_type: SECURE
  return_type: [INTEGER, the HTTP status code - 200 indicates success]
  options: |
    NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    COMMENT = $$ Deletes the secret with the given name. $$
  sql: |
    SELECT
      live.udf_api(
        'DELETE',
        CONCAT_WS('/', {{ construct_api_route("secret") }}, _utils.UDF_WHOAMI(), NAME),
        {},
        {},
        ''
      ):status_code::INTEGER

{% endmacro %}
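An illustrative round trip under these definitions (secret name and value hypothetical); every call routes through _utils.UDF_WHOAMI(), so a user only ever touches their own entries:

    SELECT secrets.udf_create_secret('my_rpc_key', {'token': 'abc123'});  -- HTTP status, 200 on success
    SELECT secrets.udf_get_secret('my_rpc_key');                          -- the stored OBJECT
    SELECT secrets.udf_delete_secret('my_rpc_key');                       -- HTTP status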
macros/core/utils.yaml.sql (new file, +241 lines)
@ -0,0 +1,241 @@
{% macro config_core_utils(schema="utils") %}

- name: {{ schema }}.udf_register_secret
  signature:
    - [request_id, STRING]
    - [key, STRING]
  func_type: SECURE
  return_type: OBJECT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    SELECT
      _utils.UDF_REGISTER_SECRET(REQUEST_ID, _utils.UDF_WHOAMI(), KEY)

- name: {{ schema }}.udf_hex_to_int
  signature:
    - [hex, STRING]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE PYTHON
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    RUNTIME_VERSION = '3.8'
    HANDLER = 'hex_to_int'
  sql: |
    {{ python_hex_to_int() | indent(4) }}
- name: {{ schema }}.udf_hex_to_int
  signature:
    - [encoding, STRING]
    - [hex, STRING]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE PYTHON
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    RUNTIME_VERSION = '3.8'
    HANDLER = 'hex_to_int'
  sql: |
    {{ python_udf_hex_to_int_with_encoding() | indent(4) }}
- name: {{ schema }}.udf_evm_text_signature
  signature:
    - [abi, VARIANT]
  return_type: TEXT
  options: |
    LANGUAGE PYTHON
    RUNTIME_VERSION = '3.8'
    HANDLER = 'get_simplified_signature'
  sql: |
    {{ create_udf_evm_text_signature() | indent(4) }}
- name: {{ schema }}.udf_keccak256
  signature:
    - [event_name, VARCHAR(255)]
  return_type: TEXT
  options: |
    LANGUAGE PYTHON
    RUNTIME_VERSION = '3.8'
    PACKAGES = ('pycryptodome==3.15.0')
    HANDLER = 'udf_encode'
  sql: |
    {{ create_udf_keccak256() | indent(4) }}
- name: {{ schema }}.udf_hex_to_string
  signature:
    - [hex, STRING]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    SELECT
      LTRIM(regexp_replace(
        try_hex_decode_string(hex),
        '[\x00-\x1F\x7F-\x9F\xAD]', '', 1))
- name: {{ schema }}.udf_int_to_hex
  signature:
    - [int, INTEGER]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    select CONCAT('0x', TRIM(TO_CHAR(int, 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX')))

- name: {{ schema }}.udf_json_rpc_call
  signature:
    - [method, STRING]
    - [params, ARRAY]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    {{ sql_udf_json_rpc_call() }}
- name: {{ schema }}.udf_json_rpc_call
  signature:
    - [method, STRING]
    - [params, OBJECT]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    {{ sql_udf_json_rpc_call() }}
- name: {{ schema }}.udf_json_rpc_call
  signature:
    - [method, STRING]
    - [params, OBJECT]
    - [id, STRING]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    {{ sql_udf_json_rpc_call(False) }}
- name: {{ schema }}.udf_json_rpc_call
  signature:
    - [method, STRING]
    - [params, ARRAY]
    - [id, STRING]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    {{ sql_udf_json_rpc_call(False) }}

- name: {{ schema }}.udf_urlencode
  signature:
    - [query, OBJECT]
    - [doseq, BOOLEAN]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE PYTHON
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    RUNTIME_VERSION = '3.8'
    COMMENT=$$Python [function](https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode) to convert an object to a URL query string.$$
    HANDLER = 'object_to_url_query_string'
  sql: |
    {{ python_object_to_url_query_string() | indent(4) }}
- name: {{ schema }}.udf_urlencode
  signature:
    - [query, ARRAY]
    - [doseq, BOOLEAN]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE PYTHON
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    RUNTIME_VERSION = '3.8'
    COMMENT=$$Python [function](https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode) to convert an array to a URL query string.$$
    HANDLER = 'object_to_url_query_string'
  sql: |
    {{ python_object_to_url_query_string() | indent(4) }}
- name: {{ schema }}.udf_urlencode
  signature:
    - [query, ARRAY]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    SELECT {{ schema }}.udf_urlencode(query, FALSE)
- name: {{ schema }}.udf_urlencode
  signature:
    - [query, OBJECT]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    SELECT {{ schema }}.udf_urlencode(query, FALSE)
- name: {{ schema }}.udf_object_to_url_query_string
  signature:
    - [object, OBJECT]
  return_type: TEXT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: SELECT utils.udf_urlencode(object, FALSE)

- name: {{ schema }}.udf_evm_transform_log
  signature:
    - [decoded, VARIANT]
  return_type: VARIANT
  options: |
    NULL
    LANGUAGE PYTHON
    IMMUTABLE
    RUNTIME_VERSION = '3.8'
    HANDLER = 'transform'
  sql: |
    {{ python_udf_evm_transform_log() | indent(4) }}

- name: {{ schema }}.udf_evm_decode_log
  signature:
    - [abi, ARRAY]
    - [data, OBJECT]
  return_type: ARRAY
  func_type: EXTERNAL
  api_integration: '{{ var("API_INTEGRATION") }}'
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
  sql: evm/decode/log
- name: {{ schema }}.udf_evm_decode_log
  signature:
    - [abi, OBJECT]
    - [data, OBJECT]
  return_type: ARRAY
  func_type: EXTERNAL
  api_integration: '{{ var("API_INTEGRATION") }}'
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
  sql: evm/decode/log

{% endmacro %}
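A few illustrative calls against these helpers (the hex results follow from the Python docstrings in functions.py.sql above; the urlencode output assumes key order is preserved):

    SELECT utils.udf_hex_to_int('0x1e240');                    -- '123456'
    SELECT utils.udf_hex_to_int('s2c', 'ffffffffe5b83acf');    -- '-440911153' (two's complement)
    SELECT utils.udf_urlencode({'page': 1, 'per_page': 100});  -- 'page=1&per_page=100'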
@ -11,7 +11,6 @@
    {% set sql %}
    {{ create_udf_get_chainhead() }}
    {{ create_udf_bulk_json_rpc() }}
    {{ create_udf_bulk_get_traces() }}
    {{ create_udf_decode_array_string() }}
    {{ create_udf_decode_array_object() }}
    {{ create_udf_bulk_decode_logs() }}
macros/grants.sql (new file, +51 lines)
@ -0,0 +1,51 @@
{% macro apply_grants_by_schema(schema) %}
    {#
        Generates SQL to grant permissions to roles for a given schema.
        This gets run automatically when a deployment is made to prod.

        This can be manually run to grant permissions to a new schema:
        `dbt run-operation apply_grants_by_schema --args '{"schema": "my_schema"}'`
    #}
    {%- set outer = namespace(sql="") -%}
    {% for role in fromyaml(var("ROLES")) %}
        {% set sql -%}
            {% if schema.startswith("_") %}
                REVOKE USAGE ON SCHEMA {{ target.database }}.{{ schema }} FROM {{ role }};
                REVOKE USAGE ON ALL FUNCTIONS IN SCHEMA {{ target.database }}.{{ schema }} FROM {{ role }};
            {%- else -%}
                GRANT USAGE ON SCHEMA {{ target.database }}.{{ schema }} TO {{ role }};
                GRANT USAGE ON ALL FUNCTIONS IN SCHEMA {{ target.database }}.{{ schema }} TO {{ role }};

                GRANT SELECT ON ALL TABLES IN SCHEMA {{ target.database }}.{{ schema }} TO {{ role }};
                GRANT SELECT ON ALL VIEWS IN SCHEMA {{ target.database }}.{{ schema }} TO {{ role }};
            {%- endif -%}
        {%- endset -%}
        {%- set outer.sql = outer.sql ~ sql -%}
    {%- endfor -%}
    {{ outer.sql }}
{%- endmacro -%}

{% macro apply_grants_to_all_schema() %}
    {#
        Run SQL to grant permissions to roles for all schemas.
        This is useful for when a new role is created and needs to be granted access to all schemas.
        This is not used in the normal grant process.

        `dbt run-operation apply_grants_to_all_schema`
    #}
    {% if execute and target.name == "prod" %}
        {% set sql_get_schema %}
            SELECT SCHEMA_NAME
            FROM {{ target.database }}.INFORMATION_SCHEMA.SCHEMATA
            WHERE SCHEMA_NAME NOT IN ('PUBLIC', 'INFORMATION_SCHEMA')
        {%- endset -%}
        {%- set results = run_query(sql_get_schema) -%}
        {% set sql_apply_grants %}
            {%- for schema in results.columns[0].values() -%}
                {{ apply_grants_by_schema(schema) }}
            {%- endfor -%}
        {%- endset -%}
        {% do log(sql_apply_grants, true) %}
        {% do run_query(sql_apply_grants) %}
    {%- endif -%}
{%- endmacro -%}
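For a public schema, the role loop expands to statements of this shape (sketch for role VELOCITY_INTERNAL and schema core):

    GRANT USAGE ON SCHEMA <database>.core TO VELOCITY_INTERNAL;
    GRANT USAGE ON ALL FUNCTIONS IN SCHEMA <database>.core TO VELOCITY_INTERNAL;
    GRANT SELECT ON ALL TABLES IN SCHEMA <database>.core TO VELOCITY_INTERNAL;
    GRANT SELECT ON ALL VIEWS IN SCHEMA <database>.core TO VELOCITY_INTERNAL;

Underscore-prefixed internal schemas such as _live and _utils take the REVOKE branch instead, keeping the raw external functions hidden from reader roles.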
macros/marketplace/github/actions_udfs.yaml.sql (new file, +148 lines)
@ -0,0 +1,148 @@
{% macro config_github_actions_udfs(schema_name = "github_actions", utils_schema_name = "github_utils") -%}
{#
    This macro is used to generate the Github API Calls
#}
- name: {{ schema_name -}}.workflows
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.GET(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows'),
        query
      ):data::OBJECT
- name: {{ schema_name -}}.workflows
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
  sql: |
    SELECT
      {{ schema_name -}}.workflows(owner, repo, {})

- name: {{ schema_name -}}.runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.GET(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/runs'),
        query
      ):data::OBJECT
- name: {{ schema_name -}}.runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    SELECT
      {{ schema_name -}}.runs(owner, repo, {})

- name: {{ schema_name -}}.workflow_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.GET(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'runs'),
        query
      ):data::OBJECT
- name: {{ schema_name -}}.workflow_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$
  sql: |
    SELECT
      {{ schema_name -}}.workflow_runs(owner, repo, workflow_id, {})

- name: {{ schema_name -}}.workflow_dispatches
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
    - [body, "OBJECT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#create-a-workflow-dispatch-event).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.POST(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'dispatches'),
        COALESCE(body, {'ref': 'main'})::OBJECT
      )::OBJECT

- name: {{ schema_name -}}.workflow_dispatches
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#create-a-workflow-dispatch-event).$$
  sql: |
    SELECT
      {{ schema_name -}}.workflow_dispatches(owner, repo, workflow_id, NULL)

- name: {{ schema_name -}}.workflow_enable
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Enables a workflow. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/reference/actions#enable-a-workflow).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.PUT(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'enable'),
        {}
      )::OBJECT
- name: {{ schema_name -}}.workflow_disable
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Disables a workflow. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/reference/actions#disable-a-workflow).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.PUT(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'disable'),
        {}
      )::OBJECT
{% endmacro %}
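Illustrative usage of the thin and full overloads (owner, repo, and workflow file hypothetical):

    SELECT github_actions.workflows('FlipsideCrypto', 'polygon-models');
    SELECT github_actions.workflow_dispatches('FlipsideCrypto', 'polygon-models', 'dbt_run_dbt.yml', {'ref': 'main'});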
macros/marketplace/github/actions_udtfs.yml.sql (new file, +169 lines)
@ -0,0 +1,169 @@
{% macro config_github_actions_udtfs(schema_name = "github_actions", utils_schema_name = "github_utils") -%}
{#
    This macro is used to generate the Github API Calls
#}
- name: {{ schema_name -}}.tf_workflows
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "TABLE(id INTEGER, badge_url STRING, created_at TIMESTAMP, html_url STRING, name STRING, node_id STRING, path STRING, state STRING, updated_at TIMESTAMP, url STRING)"
  options: |
    COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
  sql: |
    WITH workflows AS
    (
      SELECT
        github_actions.workflows(OWNER, REPO, QUERY) AS response
    )
    SELECT
      value:id::INTEGER AS id
      ,value:badge_url::STRING AS badge_url
      ,value:created_at::TIMESTAMP AS created_at
      ,value:html_url::STRING AS html_url
      ,value:name::STRING AS name
      ,value:node_id::STRING AS node_id
      ,value:path::STRING AS path
      ,value:state::STRING AS state
      ,value:updated_at::TIMESTAMP AS updated_at
      ,value:url::STRING AS url
    FROM workflows, LATERAL FLATTEN( input=> response:workflows)
- name: {{ schema_name -}}.tf_workflows
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
  return_type:
    - "TABLE(id INTEGER, badge_url STRING, created_at TIMESTAMP, html_url STRING, name STRING, node_id STRING, path STRING, state STRING, updated_at TIMESTAMP, url STRING)"
  options: |
    COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
  sql: |
    SELECT *
    FROM TABLE({{ schema_name -}}.tf_workflows(owner, repo, {}))

- name: {{ schema_name -}}.tf_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "TABLE(id NUMBER, name STRING, node_id STRING, check_suite_id NUMBER, check_suite_node_id STRING, head_branch STRING, head_sha STRING, run_number NUMBER, event STRING, display_title STRING, status STRING, conclusion STRING, workflow_id NUMBER, url STRING, html_url STRING, pull_requests STRING, created_at TIMESTAMP, updated_at TIMESTAMP, actor OBJECT, run_attempt STRING, run_started_at TIMESTAMP, triggering_actor OBJECT, jobs_url STRING, logs_url STRING, check_suite_url STRING, artifacts_url STRING, cancel_url STRING, rerun_url STRING, workflow_url STRING, head_commit OBJECT, repository OBJECT, head_repository OBJECT)"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    WITH response AS
    (
      SELECT
        github_actions.runs(OWNER, REPO, QUERY) AS response
    )
    SELECT
      value:id::NUMBER AS id
      ,value:name::STRING AS name
      ,value:node_id::STRING AS node_id
      ,value:check_suite_id::NUMBER AS check_suite_id
      ,value:check_suite_node_id::STRING AS check_suite_node_id
      ,value:head_branch::STRING AS head_branch
      ,value:head_sha::STRING AS head_sha
      ,value:run_number::NUMBER AS run_number
      ,value:event::STRING AS event
      ,value:display_title::STRING AS display_title
      ,value:status::STRING AS status
      ,value:conclusion::STRING AS conclusion
      ,value:workflow_id::NUMBER AS workflow_id
      ,value:url::STRING AS url
      ,value:html_url::STRING AS html_url
      ,value:pull_requests::STRING AS pull_requests
      ,value:created_at::TIMESTAMP AS created_at
      ,value:updated_at::TIMESTAMP AS updated_at
      ,value:actor::OBJECT AS actor
      ,value:run_attempt::STRING AS run_attempt
      ,value:run_started_at::TIMESTAMP AS run_started_at
      ,value:triggering_actor::OBJECT AS triggering_actor
      ,value:jobs_url::STRING AS jobs_url
      ,value:logs_url::STRING AS logs_url
      ,value:check_suite_url::STRING AS check_suite_url
      ,value:artifacts_url::STRING AS artifacts_url
      ,value:cancel_url::STRING AS cancel_url
      ,value:rerun_url::STRING AS rerun_url
      ,value:workflow_url::STRING AS workflow_url
      ,value:head_commit::OBJECT AS head_commit
      ,value:repository::OBJECT AS repository
      ,value:head_repository::OBJECT AS head_repository
    FROM response, LATERAL FLATTEN( input=> response:workflow_runs)
- name: {{ schema_name -}}.tf_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
  return_type:
    - "TABLE(id NUMBER, name STRING, node_id STRING, check_suite_id NUMBER, check_suite_node_id STRING, head_branch STRING, head_sha STRING, run_number NUMBER, event STRING, display_title STRING, status STRING, conclusion STRING, workflow_id NUMBER, url STRING, html_url STRING, pull_requests STRING, created_at TIMESTAMP, updated_at TIMESTAMP, actor OBJECT, run_attempt STRING, run_started_at TIMESTAMP, triggering_actor OBJECT, jobs_url STRING, logs_url STRING, check_suite_url STRING, artifacts_url STRING, cancel_url STRING, rerun_url STRING, workflow_url STRING, head_commit OBJECT, repository OBJECT, head_repository OBJECT)"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    SELECT *
    FROM TABLE({{ schema_name -}}.tf_runs(owner, repo, {}))

- name: {{ schema_name -}}.tf_workflow_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflkow_id, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "TABLE(id NUMBER, name STRING, node_id STRING, check_suite_id NUMBER, check_suite_node_id STRING, head_branch STRING, head_sha STRING, run_number NUMBER, event STRING, display_title STRING, status STRING, conclusion STRING, workflow_id NUMBER, url STRING, html_url STRING, pull_requests STRING, created_at TIMESTAMP, updated_at TIMESTAMP, actor OBJECT, run_attempt STRING, run_started_at TIMESTAMP, triggering_actor OBJECT, jobs_url STRING, logs_url STRING, check_suite_url STRING, artifacts_url STRING, cancel_url STRING, rerun_url STRING, workflow_url STRING, head_commit OBJECT, repository OBJECT, head_repository OBJECT)"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    WITH response AS
    (
      SELECT
        github_actions.workflow_runs(OWNER, REPO, WORKFLKOW_ID, QUERY) AS response
    )
    SELECT
      value:id::NUMBER AS id
      ,value:name::STRING AS name
      ,value:node_id::STRING AS node_id
      ,value:check_suite_id::NUMBER AS check_suite_id
      ,value:check_suite_node_id::STRING AS check_suite_node_id
      ,value:head_branch::STRING AS head_branch
      ,value:head_sha::STRING AS head_sha
      ,value:run_number::NUMBER AS run_number
      ,value:event::STRING AS event
      ,value:display_title::STRING AS display_title
      ,value:status::STRING AS status
      ,value:conclusion::STRING AS conclusion
      ,value:workflow_id::NUMBER AS workflow_id
      ,value:url::STRING AS url
      ,value:html_url::STRING AS html_url
      ,value:pull_requests::STRING AS pull_requests
      ,value:created_at::TIMESTAMP AS created_at
      ,value:updated_at::TIMESTAMP AS updated_at
      ,value:actor::OBJECT AS actor
      ,value:run_attempt::STRING AS run_attempt
      ,value:run_started_at::TIMESTAMP AS run_started_at
      ,value:triggering_actor::OBJECT AS triggering_actor
      ,value:jobs_url::STRING AS jobs_url
      ,value:logs_url::STRING AS logs_url
      ,value:check_suite_url::STRING AS check_suite_url
      ,value:artifacts_url::STRING AS artifacts_url
      ,value:cancel_url::STRING AS cancel_url
      ,value:rerun_url::STRING AS rerun_url
      ,value:workflow_url::STRING AS workflow_url
      ,value:head_commit::OBJECT AS head_commit
      ,value:repository::OBJECT AS repository
      ,value:head_repository::OBJECT AS head_repository
    FROM response, LATERAL FLATTEN( input=> response:workflow_runs)
- name: {{ schema_name -}}.tf_workflow_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflkow_id, "TEXT"]
  return_type:
    - "TABLE(id NUMBER, name STRING, node_id STRING, check_suite_id NUMBER, check_suite_node_id STRING, head_branch STRING, head_sha STRING, run_number NUMBER, event STRING, display_title STRING, status STRING, conclusion STRING, workflow_id NUMBER, url STRING, html_url STRING, pull_requests STRING, created_at TIMESTAMP, updated_at TIMESTAMP, actor OBJECT, run_attempt STRING, run_started_at TIMESTAMP, triggering_actor OBJECT, jobs_url STRING, logs_url STRING, check_suite_url STRING, artifacts_url STRING, cancel_url STRING, rerun_url STRING, workflow_url STRING, head_commit OBJECT, repository OBJECT, head_repository OBJECT)"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    SELECT *
    FROM TABLE({{ schema_name -}}.tf_workflow_runs(owner, repo, WORKFLKOW_ID, {}))

{% endmacro %}
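Because these are table functions, results land relationally instead of as one VARIANT blob; an illustrative query (repo hypothetical):

    SELECT id, name, status, conclusion
    FROM TABLE(github_actions.tf_runs('FlipsideCrypto', 'polygon-models'))
    ORDER BY created_at DESC;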
macros/marketplace/github/utils_udfs.yaml.sql (new file, +87 lines)
@ -0,0 +1,87 @@
{% macro config_github_utils_udfs(schema_name = "github_utils", utils_schema_name = "github_utils") -%}
{#
    This macro is used to generate the Github API Calls
#}
- name: {{ schema_name -}}.octocat
  signature:
    - []
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Verify token [Authenticating to the REST API](https://docs.github.com/en/rest/overview/authenticating-to-the-rest-api?apiVersion=2022-11-28).$$
  sql: |
    SELECT
      live.udf_api(
        'GET',
        'https://api.github.com/octocat',
        {'Authorization': 'Bearer {TOKEN}',
         'X-GitHub-Api-Version': '2022-11-28'},
        {},
        '_FSC_SYS/GITHUB'
      ) as response

- name: {{ schema_name -}}.headers
  signature: []
  return_type:
    - "TEXT"
  options: |
    NOT NULL
    IMMUTABLE
    MEMOIZABLE
  sql: |
    SELECT '{"Authorization": "Bearer {TOKEN}",
             "X-GitHub-Api-Version": "2022-11-28",
             "Accept": "application/vnd.github+json"}'

- name: {{ schema_name -}}.get
  signature:
    - [route, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Issues an authenticated GET request to the GitHub REST API, URL-encoding the query object onto the route.$$
  sql: |
    SELECT
      live.udf_api(
        'GET',
        CONCAT_WS('/', 'https://api.github.com', route || '?') || utils.udf_urlencode(query),
        PARSE_JSON({{ schema_name -}}.headers()),
        {},
        '_FSC_SYS/GITHUB'
      )
- name: {{ schema_name -}}.post
  signature:
    - [route, "TEXT"]
    - [data, "OBJECT"]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Issues an authenticated POST request to the GitHub REST API with the given JSON body.$$
  sql: |
    SELECT
      live.udf_api(
        'POST',
        CONCAT_WS('/', 'https://api.github.com', route),
        PARSE_JSON({{ schema_name -}}.headers()),
        data,
        '_FSC_SYS/GITHUB'
      )
- name: {{ schema_name -}}.put
  signature:
    - [route, "TEXT"]
    - [data, "OBJECT"]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Issues an authenticated PUT request to the GitHub REST API with the given JSON body.$$
  sql: |
    SELECT
      live.udf_api(
        'PUT',
        CONCAT_WS('/', 'https://api.github.com', route),
        PARSE_JSON({{ schema_name -}}.headers()),
        data,
        '_FSC_SYS/GITHUB'
      )
{% endmacro %}
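A sketch of how the GET helper assembles its URL (route and query hypothetical): the query object is URL-encoded and appended after the '?', so

    SELECT github_utils.get('repos/FlipsideCrypto/polygon-models/actions/runs', {'per_page': 10});

resolves to https://api.github.com/repos/FlipsideCrypto/polygon-models/actions/runs?per_page=10, with the bearer token filled in server-side from the '_FSC_SYS/GITHUB' secret.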
@ -1,9 +1,19 @@
{% macro create_aws_polygon_api() %}
    {{ log(
        "Creating integration for target:" ~ target
    ) }}

    {% if target.name == "prod" %}
        {% set sql %}
        CREATE api integration IF NOT EXISTS aws_polygon_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-polygon' api_allowed_prefixes = (
            'https://088pv40k78.execute-api.us-east-1.amazonaws.com/prod/',
            'https://ug2z7nx4bi.execute-api.us-east-1.amazonaws.com/dev/'
        CREATE api integration IF NOT EXISTS aws_polygon_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/polygon-api-prod-rolesnowflakeudfsAF733095-9hTxS6yYCWlj' api_allowed_prefixes = (
            'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/'
        ) enabled = TRUE;
        {% endset %}
        {% do run_query(sql) %}
    {% elif target.name == "dev" %}
        {% set sql %}
        CREATE api integration IF NOT EXISTS aws_polygon_api_dev api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/polygon-api-dev-rolesnowflakeudfsAF733095-10H2D361D3DJD' api_allowed_prefixes = (
            'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/'
        ) enabled = TRUE;
        {% endset %}
        {% do run_query(sql) %}
macros/streamline/manage_udfs.sql (new file, +215 lines)
@ -0,0 +1,215 @@
{% macro drop_function(
        func_name,
        signature
    ) %}
    DROP FUNCTION IF EXISTS {{ func_name }}({{ compile_signature(signature, drop_ = True) }});
{% endmacro %}

{%- macro construct_api_route(route) -%}
    'https://{{ var("EXTERNAL_FUNCTION_URI") | lower }}{{ route }}'
{%- endmacro -%}

{%- macro compile_signature(
        params,
        drop_ = False
    ) -%}
    {% for p in params -%}
        {%- set name = p.0 -%}
        {%- set data_type = p.1 -%}
        {% if drop_ %}
            {{ data_type -}}
        {% else %}
            {{ name ~ " " ~ data_type -}}
        {%- endif -%}
        {%- if not loop.last -%},
        {%- endif -%}
    {% endfor -%}
{%- endmacro -%}
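compile_signature renders the same parameter list two ways, names included for CREATE and types only for DROP; e.g., given signature [[method, STRING], [url, STRING]]:

    compile_signature(signature)              -> method STRING, url STRING
    compile_signature(signature, drop_=True)  -> STRING, STRING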

{% macro create_sql_function(
        name_,
        signature,
        return_type,
        sql_,
        api_integration = none,
        options = none,
        func_type = none
    ) %}
    CREATE OR REPLACE {{ func_type }} FUNCTION {{ name_ }}(
        {{- compile_signature(signature) }}
    )
    COPY GRANTS
    RETURNS {{ return_type }}
    {% if options -%}
        {{ options }}
    {% endif %}
    {%- if api_integration -%}
        api_integration = {{ api_integration }}
        AS {{ construct_api_route(sql_) ~ ";" }}
    {% else -%}
        AS
        $$
        {{ sql_ }}
        $$;
    {%- endif -%}
{%- endmacro -%}

{%- macro create_or_drop_function_from_config(
        config,
        drop_ = False
    ) -%}
    {% set name_ = config ["name"] %}
    {% set signature = config ["signature"] %}
    {% set return_type = config ["return_type"] if config ["return_type"] is string else config ["return_type"][0] %}
    {% set sql_ = config ["sql"] %}
    {% set options = config ["options"] %}
    {% set api_integration = config ["api_integration"] %}
    {% set func_type = config ["func_type"] %}

    {% if not drop_ -%}
        {{ create_sql_function(
            name_ = name_,
            signature = signature,
            return_type = return_type,
            sql_ = sql_,
            options = options,
            api_integration = api_integration,
            func_type = func_type
        ) }}
    {%- else -%}
        {{ drop_function(
            name_,
            signature = signature,
        ) }}
    {%- endif %}
{% endmacro %}

{% macro crud_udfs(config_func, schema, drop_) %}
    {#
        Generate create or drop statements for a list of udf configs for a given schema

        config_func: function that returns a list of udf configs
        drop_: whether to drop or create the udfs
    #}
    {% set udfs = fromyaml(config_func()) %}
    {%- for udf in udfs -%}
        {% if udf["name"].split(".") | first == schema %}
            CREATE SCHEMA IF NOT EXISTS {{ schema }};
            {{- create_or_drop_function_from_config(udf, drop_=drop_) -}}
        {%- endif -%}
    {%- endfor -%}
{%- endmacro -%}

{% macro crud_udfs_by_chain(config_func, blockchain, network, drop_) %}
    {#
        Generate create or drop statements for a list of udf configs for a given blockchain and network

        config_func: function that returns a list of udf configs
        blockchain: blockchain name
        network: network name
        drop_: whether to drop or create the udfs
    #}
    {% set schema = blockchain if not network else blockchain ~ "_" ~ network %}
    CREATE SCHEMA IF NOT EXISTS {{ schema }};
    {%- set configs = fromyaml(config_func(blockchain, network)) if network else fromyaml(config_func(schema, blockchain)) -%}
    {%- for udf in configs -%}
        {{- create_or_drop_function_from_config(udf, drop_=drop_) -}}
    {%- endfor -%}
{%- endmacro -%}

{% macro crud_udfs_by_marketplace(config_func, schema, utility_schema, drop_) %}
    {#
        Generate create or drop statements for a list of udf configs for a given schema and utility schema

        config_func: function that returns a list of udf configs
        schema: schema name
        utility_schema: utility schema name
        drop_: whether to drop or create the udfs
    #}
    CREATE SCHEMA IF NOT EXISTS {{ schema }};
    {%- set configs = fromyaml(config_func(schema, utility_schema)) if utility_schema else fromyaml(config_func(schema, schema)) -%}
    {%- for udf in configs -%}
        {{- create_or_drop_function_from_config(udf, drop_=drop_) -}}
    {%- endfor -%}
{%- endmacro -%}

{% macro crud_marketplace_udfs(config_func, schemaName, base_api_schema_name, drop_) %}
    {#
        Generate create or drop statements for a list of udf configs for a given schema and api

        config_func: function that returns a list of udf configs
        schemaName: the target schema to build the udfs
        base_api_schema_name: the schema that contains base api functions
        drop_: whether to drop or create the udfs
    #}
    {%- set udfs = fromyaml(config_func(schemaName, base_api_schema_name)) -%}
    {%- for udf in udfs -%}
        {{- create_or_drop_function_from_config(udf, drop_=drop_) -}}
    {%- endfor -%}
{%- endmacro -%}

{% macro ephemeral_deploy_core(config) %}
    {#
        This macro is used to deploy functions using ephemeral models.
        It should only be used within an ephemeral model.
    #}
    {% if execute and (var("UPDATE_UDFS_AND_SPS") or var("DROP_UDFS_AND_SPS")) and model.unique_id in selected_resources %}
        {% set sql %}
            {{- crud_udfs(config, this.schema, var("DROP_UDFS_AND_SPS")) -}}
        {%- endset -%}
        {%- if var("DROP_UDFS_AND_SPS") -%}
            {%- do log("Drop core udfs: " ~ this.database ~ "." ~ this.schema, true) -%}
        {%- else -%}
            {%- do log("Deploy core udfs: " ~ this.database ~ "." ~ this.schema, true) -%}
        {%- endif -%}
        {%- do run_query(sql ~ apply_grants_by_schema(this.schema)) -%}
    {%- endif -%}
    SELECT '{{ model.schema }}' as schema_
{%- endmacro -%}

{% macro ephemeral_deploy(configs) %}
    {#
        This macro is used to deploy functions using ephemeral models.
        It should only be used within an ephemeral model.
    #}
    {%- set blockchain = this.schema -%}
    {%- set network = this.identifier -%}
    {% set schema = blockchain ~ "_" ~ network %}
    {% if execute and (var("UPDATE_UDFS_AND_SPS") or var("DROP_UDFS_AND_SPS")) and model.unique_id in selected_resources %}
        {% set sql %}
            {% for config in configs %}
                {{- crud_udfs_by_chain(config, blockchain, network, var("DROP_UDFS_AND_SPS")) -}}
            {%- endfor -%}
        {%- endset -%}
        {%- if var("DROP_UDFS_AND_SPS") -%}
            {%- do log("Drop partner udfs: " ~ this.database ~ "." ~ schema, true) -%}
        {%- else -%}
            {%- do log("Deploy partner udfs: " ~ this.database ~ "." ~ schema, true) -%}
        {%- endif -%}
        {%- do run_query(sql ~ apply_grants_by_schema(schema)) -%}
    {%- endif -%}
    SELECT '{{ model.schema }}' as schema_
{%- endmacro -%}

{% macro ephemeral_deploy_marketplace(configs) %}
    {#
        This macro is used to deploy functions using ephemeral models.
        It should only be used within an ephemeral model.
    #}
    {%- set schema = this.schema -%}
    {%- set utility_schema = this.identifier -%}
    {% if execute and (var("UPDATE_UDFS_AND_SPS") or var("DROP_UDFS_AND_SPS")) and model.unique_id in selected_resources %}
        {% set sql %}
            {% for config in configs %}
                {{- crud_udfs_by_marketplace(config, schema, utility_schema, var("DROP_UDFS_AND_SPS")) -}}
            {%- endfor -%}
        {%- endset -%}
        {%- if var("DROP_UDFS_AND_SPS") -%}
            {%- do log("Drop marketplace udfs: " ~ this.database ~ "." ~ schema, true) -%}
        {%- else -%}
            {%- do log("Deploy marketplace udfs: " ~ this.database ~ "." ~ schema, true) -%}
        {%- endif -%}
        {%- do run_query(sql ~ apply_grants_by_schema(schema)) -%}
    {%- endif -%}
    SELECT '{{ model.schema }}' as schema_
{%- endmacro -%}
@ -1,9 +1,10 @@
|
||||
{% macro create_udf_get_chainhead() %}
|
||||
CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead(
|
||||
) returns variant api_integration = aws_polygon_api AS {% if target.name == "prod" %}
|
||||
'https://088pv40k78.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
|
||||
CREATE
|
||||
OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead() returns variant api_integration =
|
||||
{% if target.name == "prod" %}
|
||||
aws_polygon_api AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
|
||||
{% else %}
|
||||
'https://ug2z7nx4bi.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
|
||||
aws_polygon_api_dev AS 'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
|
||||
{%- endif %};
|
||||
{% endmacro %}
|
||||
|
||||
@ -11,21 +12,10 @@
|
||||
CREATE
|
||||
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(
|
||||
json variant
|
||||
) returns text api_integration = aws_polygon_api AS {% if target.name == "prod" %}
|
||||
'https://088pv40k78.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_json_rpc'
|
||||
) returns text api_integration = {% if target.name == "prod" %}
|
||||
aws_polygon_api AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_json_rpc'
|
||||
{% else %}
|
||||
'https://ug2z7nx4bi.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_json_rpc'
|
||||
{%- endif %};
|
||||
{% endmacro %}

{% macro create_udf_bulk_get_traces() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_get_traces(
json variant
) returns text api_integration = aws_polygon_api AS {% if target.name == "prod" %}
'https://088pv40k78.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_get_traces'
{% else %}
'https://ug2z7nx4bi.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_get_traces'
aws_polygon_api_dev AS 'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_json_rpc'
{%- endif %};
{% endmacro %}

@ -34,10 +24,10 @@
OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
abi ARRAY,
DATA STRING
) returns ARRAY api_integration = aws_polygon_api AS {% if target.name == "prod" %}
'https://088pv40k78.execute-api.us-east-1.amazonaws.com/prod/decode_function'
) returns ARRAY api_integration = {% if target.name == "prod" %}
aws_polygon_api AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/decode_function'
{% else %}
'https://ug2z7nx4bi.execute-api.us-east-1.amazonaws.com/dev/decode_function'
aws_polygon_api_dev AS 'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/decode_function'
{%- endif %};
{% endmacro %}

@ -46,20 +36,44 @@
OR REPLACE EXTERNAL FUNCTION streamline.udf_decode(
abi ARRAY,
DATA OBJECT
) returns ARRAY api_integration = aws_polygon_api AS {% if target.name == "prod" %}
'https://088pv40k78.execute-api.us-east-1.amazonaws.com/prod/decode_log'
) returns ARRAY api_integration = {% if target.name == "prod" %}
aws_polygon_api AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/decode_log'
{% else %}
'https://ug2z7nx4bi.execute-api.us-east-1.amazonaws.com/dev/decode_log'
aws_polygon_api_dev AS 'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/decode_log'
{%- endif %};
{% endmacro %}
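A minimal decode sketch for the OBJECT variant, reusing the NonceChanged ABI and log from the utils.yml test added later in this commit; the exact response shape is determined by the decoder behind the API:

SELECT
    streamline.udf_decode(
        ARRAY_CONSTRUCT(
            PARSE_JSON('{"anonymous": false, "inputs": [{"indexed": false, "name": "nonce", "type": "uint256"}], "name": "NonceChanged", "type": "event"}')
        ),
        OBJECT_CONSTRUCT(
            'address', '0x55032650b14df07b85bf18a3a3ec8e0af2e028d5',
            'data', '0x000000000000000000000000000000000000000000000000000000000000279c',
            'topics', ARRAY_CONSTRUCT('0x7220970e1f1f12864ecccd8942690a837c7a8dd45d158cb891eb45a8a69134aa')
        )
    ) AS decoded;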

{% macro create_udf_bulk_decode_logs() %}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_decode_logs(
json OBJECT
) returns ARRAY api_integration = aws_polygon_api AS {% if target.name == "prod" %}
'https://088pv40k78.execute-api.us-east-1.amazonaws.com/prod/bulk_decode_logs'
) returns ARRAY api_integration = {% if target.name == "prod" %}
aws_polygon_api AS 'https://p6dhi5vxn4.execute-api.us-east-1.amazonaws.com/prod/bulk_decode_logs'
{% else %}
'https://ug2z7nx4bi.execute-api.us-east-1.amazonaws.com/dev/bulk_decode_logs'
aws_polygon_api_dev AS 'https://rzyjrd54s6.execute-api.us-east-1.amazonaws.com/dev/bulk_decode_logs'
{%- endif %};
{% endmacro %}

{% macro create_udf_introspect(
    drop_ = False
) %}
    {% set name_ = 'silver.udf_introspect' %}
    {% set signature = [('json', 'variant')] %}
    {% set return_type = 'text' %}
    {% set sql_ = construct_api_route("introspect") %}
    {% if not drop_ %}
        {{ create_sql_function(
            name_ = name_,
            signature = signature,
            return_type = return_type,
            sql_ = sql_,
            api_integration = var("API_INTEGRATION")
        ) }}
    {% else %}
        {{ drop_function(
            name_,
            signature = signature,
        ) }}
    {% endif %}
{% endmacro %}
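The same macro both deploys and tears down the function depending on drop_; a minimal usage sketch:

{# deploy silver.udf_introspect #}
{{ create_udf_introspect() }}

{# drop it again #}
{{ create_udf_introspect(drop_ = True) }}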

24
macros/streamline/utils.sql
Normal file
@ -0,0 +1,24 @@
{% macro sql_live_rpc_call(method, params, blockchain, network) %}
{#
Helper macro to call a JSON RPC method on a live node.

Parameters:
method (string): The JSON RPC method to call.
params (string): The JSON RPC parameters to pass to the method.
blockchain (string): The blockchain to call the method on.
network (string): The network to call the method on.
Returns:
string: The SQL to call the method.
#}
WITH result as (
SELECT
live.udf_api(
'{endpoint}'
,utils.udf_json_rpc_call({{ method }}, {{ params }})
,concat_ws('/', 'integration', _utils.udf_provider(), {{ blockchain }}, {{ network }})
)::VARIANT:data AS data
)
SELECT
COALESCE(data:result, {'error':data:error})
FROM result
{% endmacro -%}
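Because the macro interpolates its arguments directly into the generated SQL, string arguments must be passed as quoted literals; a minimal usage sketch (the method and network values here are illustrative):

{{ sql_live_rpc_call(
    method = "'eth_blockNumber'",
    params = "[]",
    blockchain = "'polygon'",
    network = "'mainnet'"
) }}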

2
models/deploy/core/_live.sql
Normal file
@ -0,0 +1,2 @@
{% set config = config_core__live %}
{{ ephemeral_deploy_core(config) }}

2
models/deploy/core/_utils.sql
Normal file
@ -0,0 +1,2 @@
{% set config = config_core__utils %}
{{ ephemeral_deploy_core(config) }}

11
models/deploy/core/_utils.yml
Normal file
@ -0,0 +1,11 @@
version: 2
models:
  - name: _utils
    columns:
      - name: udf_introspect
        tests:
          - test_udf:
              name: test___utils_udf_introspect
              args: "'hello'"
              assertions:
                - result = 'hello'
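Each test_udf entry boils down to calling the UDF with the given args and checking the assertions against the result; a sketch of the query this case implies, assuming the test macro follows the usual select-then-assert pattern:

SELECT
    _utils.udf_introspect('hello') AS result;
-- the test passes when: result = 'hello'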

5
models/deploy/core/live.sql
Normal file
@ -0,0 +1,5 @@
-- depends_on: {{ ref('_utils') }}
-- depends_on: {{ ref('utils') }}
-- depends_on: {{ ref('_live') }}
{% set config = config_core_live %}
{{ ephemeral_deploy_core(config) }}

6
models/deploy/core/secrets.sql
Normal file
@ -0,0 +1,6 @@
-- depends_on: {{ ref('_utils') }}
-- depends_on: {{ ref('live') }}
{% if var("ENABLE_SNOWFLAKE_SECRETS") %}
{% set config = config_core_secrets %}
{{ ephemeral_deploy_core(config) }}
{% endif %}

45
models/deploy/core/secrets.yml
Normal file
@ -0,0 +1,45 @@
version: 2
models:
  - name: secrets
    columns:
      - name: udf_register_secret
        tests:
          - test_udf:
              name: test_secrets__udf_register_secret
              args: >
                'test', 'test'
              assertions:
                - result = {'request_id':'test','success':false}
      - name: udf_get_secret
        tests:
          - test_udf:
              name: test_secrets__udf_get_secret
              args: >
                'test'
              assertions:
                - >
                  result::OBJECT = {'key': 'value'}
      - name: udf_get_secrets
        tests:
          - test_udf:
              name: test_secrets__udf_get_secrets
              args: ''
              assertions:
                - >
                  result = {'test': {'key': 'value'}}
      - name: udf_create_secret
        tests:
          - test_udf:
              name: test_secrets__udf_create_secret
              args: >
                'test', {'key': 'value'}
              assertions:
                - result = 200
      - name: udf_delete_secret
        tests:
          - test_udf:
              name: test_secrets__udf_delete_secret
              args: >
                'test_delete'
              assertions:
                - result = 200
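Read together, the assertions sketch the secret lifecycle these UDFs are expected to support; an illustrative walk-through, assuming the functions land in a secrets schema:

SELECT secrets.udf_create_secret('test', {'key': 'value'});  -- expected: 200
SELECT secrets.udf_get_secret('test');                       -- expected: {'key': 'value'}
SELECT secrets.udf_get_secrets();                            -- expected: {'test': {'key': 'value'}}
SELECT secrets.udf_delete_secret('test_delete');             -- expected: 200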

3
models/deploy/core/utils.sql
Normal file
@ -0,0 +1,3 @@
-- depends_on: {{ ref('_utils') }}
{% set config = config_core_utils %}
{{ ephemeral_deploy_core(config) }}

240
models/deploy/core/utils.yml
Normal file
@ -0,0 +1,240 @@
version: 2
models:
  - name: utils
    columns:
      - name: udf_json_rpc_call
        tests:
          - test_udf:
              args: "'foo', [], 1"
              assertions:
                - >
                  result = {
                    'jsonrpc': '2.0',
                    'method': 'foo',
                    'params': [],
                    'id': '1'
                  }
      - name: udf_urlencode
        tests:
          - test_udf:
              name: test_utils__udf_urlencode_dict_true_1
              args: >
                {'a':'b'}, TRUE
              assertions:
                - result = 'a=b'
          - test_udf:
              name: test_utils__udf_urlencode_dict_true_2
              args: >
                {'a':'a', 'b':'b'}, TRUE
              assertions:
                - result = 'a=a&b=b'
          - test_udf:
              name: test_utils__udf_urlencode_dict_true_space
              args: >
                {'space': 'abc 123'}, TRUE
              assertions:
                - result = 'space=abc+123'
          - test_udf:
              name: test_utils__udf_urlencode_dict_true_special
              args: >
                {'special!': ' !@#$,+"'}, TRUE
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
              name: test_utils__udf_urlencode_dict_true_array
              args: >
                {'array': [0, 1, 2]}, TRUE
              assertions:
                - result = 'array=0&array=1&array=2'
          - test_udf:
              name: test_utils__udf_urlencode_dict_false_1
              args: >
                {'a':'b'}, FALSE
              assertions:
                - result = 'a=b'
          - test_udf:
              name: test_utils__udf_urlencode_dict_false_2
              args: >
                {'a':'b', 'b':'b'}, FALSE
              assertions:
                - result = 'a=b&b=b'
          - test_udf:
              name: test_utils__udf_urlencode_dict_false_space
              args: >
                {'space': 'abc 123'}, FALSE
              assertions:
                - result = 'space=abc+123'
          - test_udf:
              name: test_utils__udf_urlencode_dict_false_special
              args: >
                {'special!': ' !@#$,+"'}, FALSE
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
              name: test_utils__udf_urlencode_dict_false_array
              args: >
                {'array': [0, 1, 2]}, FALSE
              assertions:
                - result = 'array=%5B0%2C+1%2C+2%5D'
          - test_udf:
              name: test_utils__udf_urlencode_dict_1
              args: >
                {'a':'b'}
              assertions:
                - result = 'a=b'
          - test_udf:
              name: test_utils__udf_urlencode_dict_2
              args: >
                {'a':'b', 'b':'b'}
              assertions:
                - result = 'a=b&b=b'
          - test_udf:
              name: test_utils__udf_urlencode_dict_space
              args: >
                {'space': 'abc 123'}
              assertions:
                - result = 'space=abc+123'
          - test_udf:
              name: test_utils__udf_urlencode_dict_special
              args: >
                {'special!': ' !@#$,+"'}
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
              name: test_utils__udf_urlencode_dict_array
              args: >
                {'array': [0, 1, 2]}
              assertions:
                - result = 'array=%5B0%2C+1%2C+2%5D'
          # the same cases again, using arrays of key/value pairs instead of dictionaries
          - test_udf:
              name: test_utils__udf_urlencode_array_true_1
              args: >
                [['a', 'b']], TRUE
              assertions:
                - result = 'a=b'
          - test_udf:
              name: test_utils__udf_urlencode_array_true_2
              args: >
                [['a', 'a'], ['b', 'b']], TRUE
              assertions:
                - result = 'a=a&b=b'
          - test_udf:
              name: test_utils__udf_urlencode_array_true_space
              args: >
                [['space', 'abc 123']], TRUE
              assertions:
                - result = 'space=abc+123'
          - test_udf:
              name: test_utils__udf_urlencode_array_true_special
              args: >
                [['special!', ' !@#$,+"']], TRUE
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
              name: test_utils__udf_urlencode_array_true_array
              args: >
                [['array', [0, 1, 2]]], TRUE
              assertions:
                - result = 'array=0&array=1&array=2'
          - test_udf:
              name: test_utils__udf_urlencode_array_false_1
              args: >
                [['a', 'b']], FALSE
              assertions:
                - result = 'a=b'
          - test_udf:
              name: test_utils__udf_urlencode_array_false_2
              args: >
                [['a', 'a'], ['b', 'b']], FALSE
              assertions:
                - result = 'a=a&b=b'
          - test_udf:
              name: test_utils__udf_urlencode_array_false_space
              args: >
                [['space', 'abc 123']], FALSE
              assertions:
                - result = 'space=abc+123'
          - test_udf:
              name: test_utils__udf_urlencode_array_false_special
              args: >
                [['special!', ' !@#$,+"']], FALSE
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
              name: test_utils__udf_urlencode_array_false_array
              args: >
                [['array', [0, 1, 2]]], FALSE
              assertions:
                - result = 'array=%5B0%2C+1%2C+2%5D'
          - test_udf:
              name: test_utils__udf_urlencode_array_1
              args: >
                [['a', 'b']]
              assertions:
                - result = 'a=b'
          - test_udf:
              name: test_utils__udf_urlencode_array_2
              args: >
                [['a', 'a'], ['b', 'b']]
              assertions:
                - result = 'a=a&b=b'
          - test_udf:
              name: test_utils__udf_urlencode_array_space
              args: >
                [['space', 'abc 123']]
              assertions:
                - result = 'space=abc+123'
          - test_udf:
              name: test_utils__udf_urlencode_array_special
              args: >
                [['special!', ' !@#$,+"']]
              assertions:
                - result = 'special%21=+%21%40%23%24%2C%2B%22'
          - test_udf:
              name: test_utils__udf_urlencode_array_array
              args: >
                [['array', [0, 1, 2]]]
              assertions:
                - result = 'array=%5B0%2C+1%2C+2%5D'
      - name: udf_evm_decode_log
        tests:
          - test_udf:
              name: test_utils__udf_evm_decode_log
              args: >
                {
                  'anonymous': false,
                  'inputs': [
                    {
                      'indexed': false,
                      'name': 'nonce',
                      'type': 'uint256'
                    }
                  ],
                  'name': 'NonceChanged',
                  'type': 'event'
                }::variant,
                {
                  'address': '0x55032650b14df07b85bf18a3a3ec8e0af2e028d5',
                  'data': '0x000000000000000000000000000000000000000000000000000000000000279c',
                  'topics': [
                    '0x7220970e1f1f12864ecccd8942690a837c7a8dd45d158cb891eb45a8a69134aa'
                  ]
                }::variant
              assertions:
                - >
                  result = [
                    {
                      'address': '0x55032650b14df07b85bF18A3a3eC8E0Af2e028d5',
                      'data': [
                        {
                          'decoded': true,
                          'name': 'nonce',
                          'type': 'uint256',
                          'value': 10140
                        }
                      ],
                      'decoded': true,
                      'name': 'NonceChanged'
                    }
                  ]
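For reference, the first few cases above correspond to direct calls like these (a sketch, assuming the UDFs deploy into a utils schema):

SELECT utils.udf_json_rpc_call('foo', [], 1);
-- {'jsonrpc': '2.0', 'method': 'foo', 'params': [], 'id': '1'}
SELECT utils.udf_urlencode({'a': 'a', 'b': 'b'}, TRUE);
-- 'a=a&b=b'
SELECT utils.udf_urlencode([['array', [0, 1, 2]]], TRUE);
-- 'array=0&array=1&array=2'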

@ -0,0 +1,7 @@
-- depends_on: {{ ref('live') }}
-- depends_on: {{ ref('github_utils__github_utils') }}
{%- set configs = [
    config_github_actions_udfs,
    config_github_actions_udtfs,
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}

@ -0,0 +1,37 @@
version: 2
models:
  - name: github_actions__github_utils
    columns:
      - name: workflows
        tests:
          - test_udf:
              name: test_github_actions__workflows_status_200
              args: >
                'FlipsideCrypto',
                'admin-models'
              assertions:
                - result:status_code = 200
                - result:error IS NULL
      - name: runs
        tests:
          - test_udf:
              name: test_github_actions__runs_status_200
              args: >
                'FlipsideCrypto',
                'admin-models',
                {}
              assertions:
                - result:status_code = 200
                - result:error IS NULL
      - name: workflow_runs
        tests:
          - test_udf:
              name: test_github_actions__workflow_runs_status_200
              args: >
                'FlipsideCrypto',
                'admin-models',
                'dbt_run_dev_refresh.yml',
                {}
              assertions:
                - result:status_code = 200
                - result:error IS NULL
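A sketch of the calls these tests exercise, assuming the UDFs deploy under a github_actions schema as the model name suggests:

SELECT github_actions.workflows('FlipsideCrypto', 'admin-models');
SELECT github_actions.runs('FlipsideCrypto', 'admin-models', {});
SELECT github_actions.workflow_runs('FlipsideCrypto', 'admin-models', 'dbt_run_dev_refresh.yml', {});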

@ -0,0 +1,5 @@
-- depends_on: {{ ref('live') }}
{%- set configs = [
    config_github_utils_udfs,
] -%}
{{- ephemeral_deploy_marketplace(configs) -}}

@ -0,0 +1,11 @@
version: 2
models:
  - name: github_utils__github_utils
    columns:
      - name: octocat
        tests:
          - test_udf:
              name: test_github_utils__octocat_status_200
              assertions:
                - result:status_code = 200
                - result:error IS NULL

7
models/silver/silver__temp.sql
Normal file
@ -0,0 +1,7 @@
{{ config (
    materialized = "view",
    tags = ['streamline_view']
) }}

SELECT
    1 AS temp
@ -29,4 +29,4 @@ WHERE

qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1
_inserted_timestamp DESC)) = 1

@ -29,4 +29,4 @@ WHERE

qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1
_inserted_timestamp DESC)) = 1

@ -1,7 +1,7 @@
{{ config (
    materialized = "view",
    post_hook = if_data_call_function(
        func = "{{this.schema}}.udf_bulk_get_traces(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','24000')}}, 'producer_batch_size', {{var('producer_batch_size','6000')}}, 'worker_batch_size', {{var('worker_batch_size','300')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))",
        func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','60000')}}, 'producer_batch_size', {{var('producer_batch_size','15000')}}, 'worker_batch_size', {{var('worker_batch_size','15000')}}, 'call_type', 'rest', 'exploded_key','[\"result\"]'))",
        target = "{{this.schema}}.{{this.identifier}}"
    ),
    tags = ['streamline_core_history']
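With the default vars, the new post_hook renders to roughly the following call ({{this.schema}} and {{this.identifier}} resolve per model at run time; the identifier shown is illustrative):

SELECT streamline.udf_bulk_json_rpc(
    OBJECT_CONSTRUCT(
        'sql_source', 'debug_traceblockbynumber_history',  -- illustrative identifier
        'external_table', 'debug_traceBlockByNumber',
        'sql_limit', 60000,
        'producer_batch_size', 15000,
        'worker_batch_size', 15000,
        'call_type', 'rest',
        'exploded_key', '["result"]'
    )
);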

@ -1,7 +1,7 @@
{{ config (
    materialized = "view",
    post_hook = if_data_call_function(
        func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','40000')}}, 'producer_batch_size', {{var('producer_batch_size','10000')}}, 'worker_batch_size', {{var('worker_batch_size','1000')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}))",
        func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','50000')}}, 'producer_batch_size', {{var('producer_batch_size','25000')}}, 'worker_batch_size', {{var('worker_batch_size','12500')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}, 'call_type', 'batch'))",
        target = "{{this.schema}}.{{this.identifier}}"
    ),
    tags = ['streamline_core_history']

@ -1,7 +1,7 @@
{{ config (
    materialized = "view",
    post_hook = if_data_call_function(
        func = "{{this.schema}}.udf_bulk_get_traces(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','24000')}}, 'producer_batch_size', {{var('producer_batch_size','6000')}}, 'worker_batch_size', {{var('worker_batch_size','300')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))",
        func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','60000')}}, 'producer_batch_size', {{var('producer_batch_size','15000')}}, 'worker_batch_size', {{var('worker_batch_size','15000')}}, 'call_type', 'rest', 'exploded_key','[\"result\"]'))",
        target = "{{this.schema}}.{{this.identifier}}"
    ),
    tags = ['streamline_core_realtime']
@ -86,9 +86,9 @@ SELECT
' ',
''
),
'",{"tracer": "callTracer"}',
'",{"tracer": "callTracer","timeout": "30s"}',
'],"id":"',
block_number :: STRING,
block_number :: INTEGER,
'"}'
)
) AS request
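After this change each request carries a tracer timeout, and the id is built from the INTEGER cast of the block number; the visible fragment implies a JSON-RPC envelope of roughly this shape (the leading fields sit outside this hunk and are reconstructed here; block values are illustrative):

{"jsonrpc": "2.0", "method": "debug_traceBlockByNumber", "params": ["0x2faf080", {"tracer": "callTracer", "timeout": "30s"}], "id": "50000000"}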

@ -1,7 +1,7 @@
{{ config (
    materialized = "view",
    post_hook = if_data_call_function(
        func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','40000')}}, 'producer_batch_size', {{var('producer_batch_size','10000')}}, 'worker_batch_size', {{var('worker_batch_size','1000')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}))",
        func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','50000')}}, 'producer_batch_size', {{var('producer_batch_size','25000')}}, 'worker_batch_size', {{var('worker_batch_size','12500')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}, 'call_type', 'batch'))",
        target = "{{this.schema}}.{{this.identifier}}"
    ),
    tags = ['streamline_core_realtime']
@ -16,7 +16,7 @@ WITH last_3_days AS (
),
blocks AS (
SELECT
block_number
block_number :: STRING AS block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
@ -30,7 +30,7 @@ blocks AS (
)
EXCEPT
SELECT
block_number
block_number :: STRING
FROM
{{ ref("streamline__complete_qn_getBlockWithReceipts") }}
WHERE
@ -92,7 +92,7 @@ SELECT
''
),
'"],"id":"',
block_number :: STRING,
block_number :: INTEGER,
'"}'
)
) AS request