mirror of
https://github.com/FlipsideCrypto/polygon-models.git
synced 2026-02-06 13:22:04 +00:00
parent
ada3e224f9
commit
a514efec38
@ -1,18 +0,0 @@
|
||||
{% macro config_core__live(schema="_live") %}
{#
    Declares the internal `_live` schema UDFs as YAML for the ephemeral-UDF
    deployment tooling. Defines a single EXTERNAL function, `udf_api`, bound
    to the API integration named by the `API_INTEGRATION` dbt var; the
    remote route is `udf_api`.
#}

- name: {{ schema }}.udf_api
  signature:
    - [method, STRING]
    - [url, STRING]
    - [headers, OBJECT]
    - [DATA, OBJECT]
    - [user_id, STRING]
    - [SECRET, STRING]
  return_type: VARIANT
  func_type: EXTERNAL
  api_integration: '{{ var("API_INTEGRATION") }}'
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
  sql: udf_api
{% endmacro %}
|
||||
@ -1,63 +0,0 @@
|
||||
{% macro config_core__utils(schema="_utils") %}
{#
    Declares the internal `_utils` schema UDFs as YAML.
    - udf_introspect / udf_register_secret: SECURE EXTERNAL functions routed
      through the `API_INTEGRATION` var.
    - udf_user_tier / udf_provider / udf_whoami: SECURE SQL functions reading
      the LIVEQUERY_CONTEXT session variable, each with a fallback default
      ('community', 'quicknode', CURRENT_USER() respectively).
#}

- name: {{ schema }}.udf_introspect
  signature:
    - [echo, STRING]
  func_type: SECURE EXTERNAL
  return_type: TEXT
  api_integration: '{{ var("API_INTEGRATION") }}'
  sql: introspect

- name: {{ schema }}.udf_user_tier
  signature: []
  func_type: SECURE
  return_type: TEXT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    MEMOIZABLE
  sql: |
    SELECT
      COALESCE(PARSE_JSON(GETVARIABLE('LIVEQUERY_CONTEXT')):userTier::STRING, 'community')

- name: {{ schema }}.udf_provider
  signature: []
  func_type: SECURE
  return_type: TEXT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    MEMOIZABLE
  sql: |
    SELECT
      COALESCE(PARSE_JSON(GETVARIABLE('LIVEQUERY_CONTEXT')):provider::STRING, 'quicknode')

- name: {{ schema }}.udf_whoami
  signature: []
  func_type: SECURE
  return_type: TEXT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    MEMOIZABLE
  sql: |
    SELECT
      COALESCE(PARSE_JSON(GETVARIABLE('LIVEQUERY_CONTEXT')):userId::STRING, CURRENT_USER())

- name: {{ schema }}.udf_register_secret
  signature:
    - [request_id, STRING]
    - [user_id, STRING]
    - [key, STRING]
  return_type: OBJECT
  func_type: SECURE EXTERNAL
  api_integration: '{{ var("API_INTEGRATION") }}'
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
  sql: secret/register

{% endmacro %}
|
||||
@ -1,123 +0,0 @@
|
||||
{% macro python_hex_to_int() %}
|
||||
def hex_to_int(hex) -> str:
    """
    Converts hex (of any size) to int (as a string). Snowflake and java script can only handle up to 64-bit (38 digits of precision)
    hex_to_int('200000000000000000000000000000211');
    >> 680564733841876926926749214863536423441
    hex_to_int('0x200000000000000000000000000000211');
    >> 680564733841876926926749214863536423441
    hex_to_int(NULL);
    >> NULL

    NOTE: the parameter is named `hex` (shadowing the builtin) because the
    Snowflake UDF signature uses that name; kept for interface stability.
    """
    try:
        # None, '' and the bare '0x' prefix are treated as NULL.
        return str(int(hex, 16)) if hex and hex != "0x" else None
    except (ValueError, TypeError):
        # Fix: was a bare `except:`, which also swallowed SystemExit /
        # KeyboardInterrupt. Only malformed/ill-typed input yields NULL now.
        return None
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro python_udf_hex_to_int_with_encoding() %}
|
||||
def hex_to_int(encoding, hex) -> str:
    """
    Converts hex (of any size) to int (as a string). Snowflake and java script can only handle up to 64-bit (38 digits of precision)
    hex_to_int('hex', '200000000000000000000000000000211');
    >> 680564733841876926926749214863536423441
    hex_to_int('hex', '0x200000000000000000000000000000211');
    >> 680564733841876926926749214863536423441
    hex_to_int('hex', NULL);
    >> NULL
    hex_to_int('s2c', 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffe5b83acf');
    >> -440911153
    """
    try:
        if not hex:
            return None
        if encoding.lower() == 's2c':
            # Signed two's-complement: width is inferred from the hex digit
            # count (4 bits per digit after the 0x prefix).
            if hex[0:2].lower() != '0x':
                hex = f'0x{hex}'

            bits = len(hex[2:]) * 4
            value = int(hex, 0)
            if value & (1 << (bits - 1)):
                # Sign bit set: reinterpret as negative.
                value -= 1 << bits
            return str(value)
        else:
            return str(int(hex, 16))
    except (ValueError, TypeError, AttributeError):
        # Fix: was a bare `except:`, which also swallowed SystemExit /
        # KeyboardInterrupt. Malformed hex (ValueError), non-string hex
        # (TypeError) or a non-string encoding (AttributeError) yield NULL.
        return None
|
||||
{% endmacro %}
|
||||
|
||||
{% macro create_udf_keccak256() %}
|
||||
from Crypto.Hash import keccak
|
||||
|
||||
def udf_encode(event_name):
    """Return the 0x-prefixed keccak-256 hex digest of *event_name* (UTF-8)."""
    digest = keccak.new(digest_bits=256, data=event_name.encode('utf-8'))
    return f"0x{digest.hexdigest()}"
|
||||
{% endmacro %}
|
||||
|
||||
{% macro create_udf_evm_text_signature() %}
|
||||
|
||||
def get_simplified_signature(abi):
    """Build the canonical EVM text signature, e.g. ``transfer(address,uint256)``,
    from a single ABI entry (a dict with ``name`` and ``inputs``).

    Tuple inputs (entries carrying ``components``) are rendered recursively as
    parenthesised groups; ``enum `` and `` payable`` qualifiers are stripped.
    """

    def render_types(inputs):
        # Each token carries a trailing comma; the caller strips the last one.
        tokens = []
        for entry in inputs:
            if 'components' in entry:
                inner = render_types(entry['components'])
                inner[-1] = inner[-1].rstrip(",")
                suffix = "[]," if entry['type'].endswith('[]') else ","
                tokens.append("(" + "".join(inner) + ")" + suffix)
            else:
                tokens.append(entry['type'].replace('enum ', '').replace(' payable', '') + ",")
        return tokens

    parts = [abi['name'] + "("]
    parts.extend(render_types(abi['inputs']))
    parts[-1] = parts[-1].rstrip(",") + ")"
    return "".join(parts)
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro python_object_to_url_query_string() %}
|
||||
from urllib.parse import urlencode
|
||||
|
||||
def object_to_url_query_string(query, doseq=False):
    """URL-encode *query*, which is either a mapping or an iterable of
    key/value pairs; *doseq* is forwarded to ``urllib.parse.urlencode``."""
    if not isinstance(query, dict):
        # Normalise an iterable of 2-item sequences into tuples for urlencode.
        query = [tuple(pair) for pair in query]
    return urlencode(query, doseq)
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
{% macro python_udf_evm_transform_log(schema) %}
|
||||
from copy import deepcopy
|
||||
|
||||
def transform_event(event: dict):
    """Replace each decoded tuple row in event["value"] with a dict keyed by
    the ABI component names. Events without "components" are returned
    unchanged (same object); otherwise a deep copy is returned."""
    new_event = deepcopy(event)
    components = new_event.get("components")
    if not components:
        return event
    for row_idx, row in enumerate(new_event["value"]):
        # Wrap every cell with its component metadata, then collapse the row
        # into a name -> value mapping.
        for col_idx, comp in enumerate(components):
            row[col_idx] = {"value": row[col_idx], **comp}
        new_event["value"][row_idx] = {cell["name"]: cell["value"] for cell in row}
    return new_event
|
||||
|
||||
|
||||
def transform(events: list):
    """Best-effort transform of a decode response: runs transform_event over
    every entry of events["data"] whose "decoded" flag is truthy.

    On any failure the input is returned unchanged — this is deliberate
    best-effort behaviour, not silent-swallow of programmer errors.
    """
    try:
        # Build the full list first so a mid-iteration failure leaves
        # events["data"] untouched.
        events["data"] = [
            transform_event(event) if event["decoded"] else event
            for event in events["data"]
        ]
        return events
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit /
        # KeyboardInterrupt.
        return events
|
||||
|
||||
{% endmacro %}
|
||||
@ -1,15 +0,0 @@
|
||||
{% macro sql_udf_json_rpc_call(use_default_id=True) %}
{#
    Emits the SQL body for the udf_json_rpc_call overloads: builds a JSON-RPC
    2.0 request object from `method` and `params`, or NULL if either is NULL.
    When use_default_id is true the `id` field is derived from
    hash(method, params); otherwise it is taken from the caller's `id` column.
#}
SELECT IFF(method IS NULL or params IS NULL,
    NULL,
    {
        'jsonrpc': '2.0',
        'method': method,
        'params': params
        {% if use_default_id %}
        , 'id': hash(method, params)::string
        {% else %}
        , 'id': id
        {% endif %}
    }
)
{% endmacro %}
|
||||
@ -1,137 +0,0 @@
|
||||
{% macro config_core_live(schema="live") %}
{#
    Public `live` schema UDFs. Every udf_api overload delegates to the
    internal external function _live.UDF_API, injecting the caller identity
    via _utils.UDF_WHOAMI() and defaulting method/headers/data/secret_name
    as appropriate. udf_rpc delegates to the sql_live_rpc_call macro
    (defined elsewhere in this project).
#}

- name: {{ schema }}.udf_api
  signature:
    - [method, STRING]
    - [url, STRING]
    - [headers, OBJECT]
    - [data, OBJECT]
    - [secret_name, STRING]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        method,
        url,
        headers,
        data,
        _utils.UDF_WHOAMI(),
        secret_name
      )
{# Overload: no secret — empty secret name. #}
- name: {{ schema }}.udf_api
  signature:
    - [method, STRING]
    - [url, STRING]
    - [headers, OBJECT]
    - [data, OBJECT]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        method,
        url,
        headers,
        data,
        _utils.UDF_WHOAMI(),
        ''
      )
{# Overload: JSON POST with default headers, no secret. #}
- name: {{ schema }}.udf_api
  signature:
    - [url, STRING]
    - [data, OBJECT]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        'POST',
        url,
        {'Content-Type': 'application/json'},
        data,
        _utils.UDF_WHOAMI(),
        ''
      )
{# Overload: JSON POST with default headers, named secret. #}
- name: {{ schema }}.udf_api
  signature:
    - [url, STRING]
    - [data, OBJECT]
    - [secret_name, STRING]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        'POST',
        url,
        {'Content-Type': 'application/json'},
        data,
        _utils.UDF_WHOAMI(),
        secret_name
      )
{# Overload: bare GET, no headers/body/secret. #}
- name: {{ schema }}.udf_api
  signature:
    - [url, STRING]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        'GET',
        url,
        {},
        {},
        _utils.UDF_WHOAMI(),
        ''
      )
{# Overload: GET with named secret. #}
- name: {{ schema }}.udf_api
  signature:
    - [url, STRING]
    - [secret_name, STRING]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
  sql: |
    SELECT
      _live.UDF_API(
        'GET',
        url,
        {},
        {},
        _utils.UDF_WHOAMI(),
        secret_name
      )

- name: {{ schema }}.udf_rpc
  signature:
    - [blockchain, STRING]
    - [network, STRING]
    - [method, STRING]
    - [parameters, VARIANT]
  return_type: VARIANT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    VOLATILE
    COMMENT = $$Executes an JSON RPC call on a blockchain.$$
  sql: |
    {{ sql_live_rpc_call("method", "parameters", "blockchain", "network") | indent(4) -}}
{% endmacro %}
|
||||
@ -1,87 +0,0 @@
|
||||
{% macro config_core_secrets(schema="secrets") %}
{#
    Public `secrets` schema UDFs for per-user secret management. All
    functions scope access to the current user via _utils.UDF_WHOAMI().
    CRUD operations route through live.udf_api to the route built by the
    construct_api_route macro (defined elsewhere in this project).
    NOTE(review): these are marked IMMUTABLE even though they perform API
    calls — presumably intentional for caching; confirm before changing.
#}

- name: {{ schema }}.udf_register_secret
  signature:
    - [request_id, STRING]
    - [key, STRING]
  func_type: SECURE
  return_type: OBJECT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    COMMENT = $$ Registers a secret with the given request ID and key. $$
  sql: |
    SELECT
      _utils.UDF_REGISTER_SECRET(REQUEST_ID, _utils.UDF_WHOAMI(), KEY)

- name: {{ schema }}.udf_get_secret
  signature:
    - [name, STRING]
  func_type: SECURE
  return_type: OBJECT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    COMMENT = $$ Returns the secret value for the given secret name. $$
  sql: |
    SELECT
      live.udf_api(
        CONCAT_WS('/', {{ construct_api_route("secret") }}, _utils.UDF_WHOAMI(), NAME)
      ):data::OBJECT

- name: {{ schema }}.udf_get_secrets
  signature: []
  func_type: SECURE
  return_type: OBJECT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    COMMENT = $$ Returns all secrets for the current user. $$
  sql: |
    SELECT
      {{ schema }}.udf_get_secret('')

- name: {{ schema }}.udf_create_secret
  signature:
    - [name, STRING]
    - [secret, OBJECT]
  func_type: SECURE
  return_type: [INTEGER, the HTTP status code - 200 indicates success]
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    COMMENT = $$ Creates a new secret with the given name and value. $$
  sql: |
    SELECT
      live.udf_api(
        CONCAT_WS('/', {{ construct_api_route("secret") }}, _utils.UDF_WHOAMI(), NAME),
        SECRET
      ):status_code::INTEGER

- name: {{ schema }}.udf_delete_secret
  signature:
    - [name, STRING]
  func_type: SECURE
  return_type: [INTEGER, the HTTP status code - 200 indicates success]
  options: |
    NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    COMMENT = $$ Deletes the secret with the given name. $$
  sql: |
    SELECT
      live.udf_api(
        'DELETE',
        CONCAT_WS('/', {{ construct_api_route("secret") }}, _utils.UDF_WHOAMI(), NAME),
        {},
        {},
        ''
      ):status_code::INTEGER

{% endmacro %}
|
||||
@ -1,241 +0,0 @@
|
||||
{% macro config_core_utils(schema="utils") %}
{#
    Public `utils` schema UDFs: hex/int conversion (Python + SQL), EVM ABI
    helpers, keccak-256 hashing, JSON-RPC payload builders and URL encoding.
    Python bodies are injected from the python_* / create_udf_* macros in
    this file.
    Fix applied: the two udf_urlencode COMMENTs said "Pthon" — corrected to
    "Python".
#}

- name: {{ schema }}.udf_register_secret
  signature:
    - [request_id, STRING]
    - [key, STRING]
  func_type: SECURE
  return_type: OBJECT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    SELECT
      _utils.UDF_REGISTER_SECRET(REQUEST_ID, _utils.UDF_WHOAMI(), KEY)

- name: {{ schema }}.udf_hex_to_int
  signature:
    - [hex, STRING]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE PYTHON
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    RUNTIME_VERSION = '3.8'
    HANDLER = 'hex_to_int'
  sql: |
    {{ python_hex_to_int() | indent(4) }}
- name: {{ schema }}.udf_hex_to_int
  signature:
    - [encoding, STRING]
    - [hex, STRING]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE PYTHON
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    RUNTIME_VERSION = '3.8'
    HANDLER = 'hex_to_int'
  sql: |
    {{ python_udf_hex_to_int_with_encoding() | indent(4) }}
- name: {{ schema }}.udf_evm_text_signature
  signature:
    - [abi, VARIANT]
  return_type: TEXT
  options: |
    LANGUAGE PYTHON
    RUNTIME_VERSION = '3.8'
    HANDLER = 'get_simplified_signature'
  sql: |
    {{ create_udf_evm_text_signature() | indent(4) }}
- name: {{ schema }}.udf_keccak256
  signature:
    - [event_name, VARCHAR(255)]
  return_type: TEXT
  options: |
    LANGUAGE PYTHON
    RUNTIME_VERSION = '3.8'
    PACKAGES = ('pycryptodome==3.15.0')
    HANDLER = 'udf_encode'
  sql: |
    {{ create_udf_keccak256() | indent(4) }}
- name: {{ schema }}.udf_hex_to_string
  signature:
    - [hex, STRING]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    SELECT
      LTRIM(regexp_replace(
        try_hex_decode_string(hex),
        '[\x00-\x1F\x7F-\x9F\xAD]', '', 1))
- name: {{ schema }}.udf_int_to_hex
  signature:
    - [int, INTEGER]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    select CONCAT('0x', TRIM(TO_CHAR(int, 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX')))

- name: {{ schema }}.udf_json_rpc_call
  signature:
    - [method, STRING]
    - [params, ARRAY]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    {{ sql_udf_json_rpc_call() }}
- name: {{ schema }}.udf_json_rpc_call
  signature:
    - [method, STRING]
    - [params, OBJECT]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    {{ sql_udf_json_rpc_call() }}
- name: {{ schema }}.udf_json_rpc_call
  signature:
    - [method, STRING]
    - [params, OBJECT]
    - [id, STRING]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    {{ sql_udf_json_rpc_call(False) }}
- name: {{ schema }}.udf_json_rpc_call
  signature:
    - [method, STRING]
    - [params, ARRAY]
    - [id, STRING]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    {{ sql_udf_json_rpc_call(False) }}

- name: {{ schema }}.udf_urlencode
  signature:
    - [query, OBJECT]
    - [doseq, BOOLEAN]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE PYTHON
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    RUNTIME_VERSION = '3.8'
    COMMENT=$$Python (function)[https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode] to convert an object to a URL query string.$$
    HANDLER = 'object_to_url_query_string'
  sql: |
    {{ python_object_to_url_query_string() | indent(4) }}
- name: {{ schema }}.udf_urlencode
  signature:
    - [query, ARRAY]
    - [doseq, BOOLEAN]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE PYTHON
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
    RUNTIME_VERSION = '3.8'
    COMMENT=$$Python (function)[https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode] to convert an array to a URL query string.$$
    HANDLER = 'object_to_url_query_string'
  sql: |
    {{ python_object_to_url_query_string() | indent(4) }}
- name: {{ schema }}.udf_urlencode
  signature:
    - [query, ARRAY]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    SELECT {{ schema }}.udf_urlencode(query, FALSE)
- name: {{ schema }}.udf_urlencode
  signature:
    - [query, OBJECT]
  return_type: TEXT
  options: |
    NULL
    LANGUAGE SQL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: |
    SELECT {{ schema }}.udf_urlencode(query, FALSE)
- name: {{ schema }}.udf_object_to_url_query_string
  signature:
    - [object, OBJECT]
  return_type: TEXT
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
    IMMUTABLE
  sql: SELECT utils.udf_urlencode(object, FALSE)

- name: {{ schema }}.udf_evm_transform_log
  signature:
    - [decoded, VARIANT]
  return_type: VARIANT
  options: |
    NULL
    LANGUAGE PYTHON
    IMMUTABLE
    RUNTIME_VERSION = '3.8'
    HANDLER = 'transform'
  sql: |
    {# NOTE(review): python_udf_evm_transform_log declares a `schema`
       parameter that is never used in its body, so calling it with no
       argument works; confirm the parameter can be dropped. #}
    {{ python_udf_evm_transform_log() | indent(4) }}

- name: {{ schema }}.udf_evm_decode_log
  signature:
    - [abi, ARRAY]
    - [data, OBJECT]
  return_type: ARRAY
  func_type: EXTERNAL
  api_integration: '{{ var("API_INTEGRATION") }}'
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
  sql: evm/decode/log
- name: {{ schema }}.udf_evm_decode_log
  signature:
    - [abi, OBJECT]
    - [data, OBJECT]
  return_type: ARRAY
  func_type: EXTERNAL
  api_integration: '{{ var("API_INTEGRATION") }}'
  options: |
    NOT NULL
    RETURNS NULL ON NULL INPUT
  sql: evm/decode/log

{% endmacro %}
|
||||
@ -1,148 +0,0 @@
|
||||
{% macro config_github_actions_udfs(schema_name = "github_actions", utils_schema_name = "github_utils") -%}
{#
    This macro is used to generate the Github API Calls.
    Scalar wrappers over the GitHub Actions REST API (workflows, runs,
    dispatch, enable/disable), delegating HTTP verbs to the
    {{ utils_schema_name }} GET/POST/PUT helpers. Each endpoint has a
    convenience overload that defaults the query/body argument.
#}
- name: {{ schema_name -}}.workflows
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.GET(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows'),
        query
      ):data::OBJECT
- name: {{ schema_name -}}.workflows
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
  sql: |
    SELECT
      {{ schema_name -}}.workflows(owner, repo, {})

- name: {{ schema_name -}}.runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.GET(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/runs'),
        query
      ):data::OBJECT
- name: {{ schema_name -}}.runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    SELECT
      {{ schema_name -}}.runs(owner, repo, {})

- name: {{ schema_name -}}.workflow_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.GET(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'runs'),
        query
      ):data::OBJECT
- name: {{ schema_name -}}.workflow_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$List all workflow runs for a workflow. You can replace workflow_id with the workflow file name. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow).$$
  sql: |
    SELECT
      {{ schema_name -}}.workflow_runs(owner, repo, workflow_id, {})

- name: {{ schema_name -}}.workflow_dispatches
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
    - [body, "OBJECT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#create-a-workflow-dispatch-event).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.POST(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'dispatches'),
        COALESCE(body, {'ref': 'main'})::OBJECT
      )::OBJECT

- name: {{ schema_name -}}.workflow_dispatches
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#create-a-workflow-dispatch-event).$$
  sql: |
    SELECT
      {{ schema_name -}}.workflow_dispatches(owner, repo, workflow_id, NULL)

- name: {{ schema_name -}}.workflow_enable
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Enables a workflow. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/reference/actions#enable-a-workflow).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.PUT(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'enable'),
        {}
      )::OBJECT
- name: {{ schema_name -}}.workflow_disable
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
  return_type:
    - "OBJECT"
  options: |
    COMMENT = $$Disables a workflow. You can replace workflow_id with the workflow file name. For example, you could use main.yaml. [Docs](https://docs.github.com/en/rest/reference/actions#disable-a-workflow).$$
  sql: |
    SELECT
      {{ utils_schema_name }}.PUT(
        CONCAT_WS('/', 'repos', owner, repo, 'actions/workflows', workflow_id, 'disable'),
        {}
      )::OBJECT
{% endmacro %}
|
||||
@ -1,169 +0,0 @@
|
||||
{% macro config_github_actions_udtfs(schema_name = "github_actions", utils_schema_name = "github_utils") -%}
{#
    This macro is used to generate the Github API Calls.
    Table functions flattening the GitHub Actions list endpoints into typed
    rows via LATERAL FLATTEN over the scalar github_actions.* wrappers.
    Fix applied: the parameter `workflkow_id` was misspelled — renamed to
    `workflow_id` consistently (signature and all use sites). Snowflake
    callers invoke positionally, so this is backward-compatible.
    NOTE(review): the inner SQL hardcodes the `github_actions` schema rather
    than using {{ schema_name }} — confirm whether that is intentional.
#}
- name: {{ schema_name -}}.tf_workflows
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "TABLE(id INTEGER, badge_url STRING, created_at TIMESTAMP, html_url STRING, name STRING, node_id STRING, path STRING, state STRING, updated_at TIMESTAMP, url STRING)"
  options: |
    COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
  sql: |
    WITH workflows AS
    (
      SELECT
        github_actions.workflows(OWNER, REPO, QUERY) AS response
    )
    SELECT
      value:id::INTEGER AS id
      ,value:badge_url::STRING AS badge_url
      ,value:created_at::TIMESTAMP AS created_at
      ,value:html_url::STRING AS html_url
      ,value:name::STRING AS name
      ,value:node_id::STRING AS node_id
      ,value:path::STRING AS path
      ,value:state::STRING AS state
      ,value:updated_at::TIMESTAMP AS updated_at
      ,value:url::STRING AS url
    FROM workflows, LATERAL FLATTEN( input=> response:workflows)
- name: {{ schema_name -}}.tf_workflows
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
  return_type:
    - "TABLE(id INTEGER, badge_url STRING, created_at TIMESTAMP, html_url STRING, name STRING, node_id STRING, path STRING, state STRING, updated_at TIMESTAMP, url STRING)"
  options: |
    COMMENT = $$[List repository workflows](https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#list-repository-workflows).$$
  sql: |
    SELECT *
    FROM TABLE({{ schema_name -}}.tf_workflows(owner, repo, {}))

- name: {{ schema_name -}}.tf_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "TABLE(id NUMBER, name STRING, node_id STRING, check_suite_id NUMBER, check_suite_node_id STRING, head_branch STRING, head_sha STRING, run_number NUMBER, event STRING, display_title STRING, status STRING, conclusion STRING, workflow_id NUMBER, url STRING, html_url STRING, pull_requests STRING, created_at TIMESTAMP, updated_at TIMESTAMP, actor OBJECT, run_attempt STRING, run_started_at TIMESTAMP, triggering_actor OBJECT, jobs_url STRING, logs_url STRING, check_suite_url STRING, artifacts_url STRING, cancel_url STRING, rerun_url STRING, workflow_url STRING, head_commit OBJECT, repository OBJECT, head_repository OBJECT)"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    WITH response AS
    (
      SELECT
        github_actions.runs(OWNER, REPO, QUERY) AS response
    )
    SELECT
      value:id::NUMBER AS id
      ,value:name::STRING AS name
      ,value:node_id::STRING AS node_id
      ,value:check_suite_id::NUMBER AS check_suite_id
      ,value:check_suite_node_id::STRING AS check_suite_node_id
      ,value:head_branch::STRING AS head_branch
      ,value:head_sha::STRING AS head_sha
      ,value:run_number::NUMBER AS run_number
      ,value:event::STRING AS event
      ,value:display_title::STRING AS display_title
      ,value:status::STRING AS status
      ,value:conclusion::STRING AS conclusion
      ,value:workflow_id::NUMBER AS workflow_id
      ,value:url::STRING AS url
      ,value:html_url::STRING AS html_url
      ,value:pull_requests::STRING AS pull_requests
      ,value:created_at::TIMESTAMP AS created_at
      ,value:updated_at::TIMESTAMP AS updated_at
      ,value:actor::OBJECT AS actor
      ,value:run_attempt::STRING AS run_attempt
      ,value:run_started_at::TIMESTAMP AS run_started_at
      ,value:triggering_actor::OBJECT AS triggering_actor
      ,value:jobs_url::STRING AS jobs_url
      ,value:logs_url::STRING AS logs_url
      ,value:check_suite_url::STRING AS check_suite_url
      ,value:artifacts_url::STRING AS artifacts_url
      ,value:cancel_url::STRING AS cancel_url
      ,value:rerun_url::STRING AS rerun_url
      ,value:workflow_url::STRING AS workflow_url
      ,value:head_commit::OBJECT AS head_commit
      ,value:repository::OBJECT AS repository
      ,value:head_repository::OBJECT AS head_repository
    FROM response, LATERAL FLATTEN( input=> response:workflow_runs)
- name: {{ schema_name -}}.tf_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
  return_type:
    - "TABLE(id NUMBER, name STRING, node_id STRING, check_suite_id NUMBER, check_suite_node_id STRING, head_branch STRING, head_sha STRING, run_number NUMBER, event STRING, display_title STRING, status STRING, conclusion STRING, workflow_id NUMBER, url STRING, html_url STRING, pull_requests STRING, created_at TIMESTAMP, updated_at TIMESTAMP, actor OBJECT, run_attempt STRING, run_started_at TIMESTAMP, triggering_actor OBJECT, jobs_url STRING, logs_url STRING, check_suite_url STRING, artifacts_url STRING, cancel_url STRING, rerun_url STRING, workflow_url STRING, head_commit OBJECT, repository OBJECT, head_repository OBJECT)"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    SELECT *
    FROM TABLE({{ schema_name -}}.tf_runs(owner, repo, {}))

- name: {{ schema_name -}}.tf_workflow_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "TABLE(id NUMBER, name STRING, node_id STRING, check_suite_id NUMBER, check_suite_node_id STRING, head_branch STRING, head_sha STRING, run_number NUMBER, event STRING, display_title STRING, status STRING, conclusion STRING, workflow_id NUMBER, url STRING, html_url STRING, pull_requests STRING, created_at TIMESTAMP, updated_at TIMESTAMP, actor OBJECT, run_attempt STRING, run_started_at TIMESTAMP, triggering_actor OBJECT, jobs_url STRING, logs_url STRING, check_suite_url STRING, artifacts_url STRING, cancel_url STRING, rerun_url STRING, workflow_url STRING, head_commit OBJECT, repository OBJECT, head_repository OBJECT)"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    WITH response AS
    (
      SELECT
        github_actions.workflow_runs(OWNER, REPO, WORKFLOW_ID, QUERY) AS response
    )
    SELECT
      value:id::NUMBER AS id
      ,value:name::STRING AS name
      ,value:node_id::STRING AS node_id
      ,value:check_suite_id::NUMBER AS check_suite_id
      ,value:check_suite_node_id::STRING AS check_suite_node_id
      ,value:head_branch::STRING AS head_branch
      ,value:head_sha::STRING AS head_sha
      ,value:run_number::NUMBER AS run_number
      ,value:event::STRING AS event
      ,value:display_title::STRING AS display_title
      ,value:status::STRING AS status
      ,value:conclusion::STRING AS conclusion
      ,value:workflow_id::NUMBER AS workflow_id
      ,value:url::STRING AS url
      ,value:html_url::STRING AS html_url
      ,value:pull_requests::STRING AS pull_requests
      ,value:created_at::TIMESTAMP AS created_at
      ,value:updated_at::TIMESTAMP AS updated_at
      ,value:actor::OBJECT AS actor
      ,value:run_attempt::STRING AS run_attempt
      ,value:run_started_at::TIMESTAMP AS run_started_at
      ,value:triggering_actor::OBJECT AS triggering_actor
      ,value:jobs_url::STRING AS jobs_url
      ,value:logs_url::STRING AS logs_url
      ,value:check_suite_url::STRING AS check_suite_url
      ,value:artifacts_url::STRING AS artifacts_url
      ,value:cancel_url::STRING AS cancel_url
      ,value:rerun_url::STRING AS rerun_url
      ,value:workflow_url::STRING AS workflow_url
      ,value:head_commit::OBJECT AS head_commit
      ,value:repository::OBJECT AS repository
      ,value:head_repository::OBJECT AS head_repository
    FROM response, LATERAL FLATTEN( input=> response:workflow_runs)
- name: {{ schema_name -}}.tf_workflow_runs
  signature:
    - [owner, "TEXT"]
    - [repo, "TEXT"]
    - [workflow_id, "TEXT"]
  return_type:
    - "TABLE(id NUMBER, name STRING, node_id STRING, check_suite_id NUMBER, check_suite_node_id STRING, head_branch STRING, head_sha STRING, run_number NUMBER, event STRING, display_title STRING, status STRING, conclusion STRING, workflow_id NUMBER, url STRING, html_url STRING, pull_requests STRING, created_at TIMESTAMP, updated_at TIMESTAMP, actor OBJECT, run_attempt STRING, run_started_at TIMESTAMP, triggering_actor OBJECT, jobs_url STRING, logs_url STRING, check_suite_url STRING, artifacts_url STRING, cancel_url STRING, rerun_url STRING, workflow_url STRING, head_commit OBJECT, repository OBJECT, head_repository OBJECT)"
  options: |
    COMMENT = $$Lists all workflow runs for a repository. You can use query parameters to narrow the list of results. [Docs](https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository).$$
  sql: |
    SELECT *
    FROM TABLE({{ schema_name -}}.tf_workflow_runs(owner, repo, WORKFLOW_ID, {}))

{% endmacro %}
|
||||
@ -1,87 +0,0 @@
|
||||
{% macro config_github_utils_udfs(schema_name = "github_utils", utils_schema_name = "github_utils") -%}
{#
    Generates the YAML config for the GitHub utility UDFs.

    Renders one entry per UDF (octocat, headers, get, post, put); the
    output is consumed by the ephemeral deploy macros to CREATE FUNCTION
    in Snowflake. All HTTP calls go through live.udf_api with the
    '_FSC_SYS/GITHUB' secret; the '{TOKEN}' placeholder in the headers is
    substituted server-side from that secret.

    Fixes vs. previous revision:
    - get/post/put COMMENTs described "list workflow runs" (copy-paste
      from the workflow-runs UDF); they now describe the generic verbs.
    - octocat used `signature:` + `- []` (a list containing an empty
      list); normalized to `signature: []` to match the sibling UDFs.
#}

- name: {{ schema_name -}}.octocat
  signature: []
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Verify token [Authenticating to the REST API](https://docs.github.com/en/rest/overview/authenticating-to-the-rest-api?apiVersion=2022-11-28).$$
  sql: |
    SELECT
      live.udf_api(
        'GET',
        'https://api.github.com/octocat',
        {'Authorization': 'Bearer {TOKEN}',
        'X-GitHub-Api-Version': '2022-11-28'},
        {},
        '_FSC_SYS/GITHUB'
      ) as response

- name: {{ schema_name -}}.headers
  signature: []
  return_type:
    - "TEXT"
  options: |
    NOT NULL
    IMMUTABLE
    MEMOIZABLE
  sql: |
    SELECT '{"Authorization": "Bearer {TOKEN}",
    "X-GitHub-Api-Version": "2022-11-28",
    "Accept": "application/vnd.github+json"}'

- name: {{ schema_name -}}.get
  signature:
    - [route, "TEXT"]
    - [query, "OBJECT"]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Issue a GET request to the GitHub REST API at `route`; `query` is URL-encoded into the query string. [Docs](https://docs.github.com/en/rest?apiVersion=2022-11-28).$$
  sql: |
    SELECT
      live.udf_api(
        'GET',
        CONCAT_WS('/', 'https://api.github.com', route || '?') || utils.udf_urlencode(query),
        PARSE_JSON({{ schema_name -}}.headers()),
        {},
        '_FSC_SYS/GITHUB'
      )

- name: {{ schema_name -}}.post
  signature:
    - [route, "TEXT"]
    - [data, "OBJECT"]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Issue a POST request to the GitHub REST API at `route`, sending `data` as the JSON request body. [Docs](https://docs.github.com/en/rest?apiVersion=2022-11-28).$$
  sql: |
    SELECT
      live.udf_api(
        'POST',
        CONCAT_WS('/', 'https://api.github.com', route),
        PARSE_JSON({{ schema_name -}}.headers()),
        data,
        '_FSC_SYS/GITHUB'
      )

- name: {{ schema_name -}}.put
  signature:
    - [route, "TEXT"]
    - [data, "OBJECT"]
  return_type:
    - "VARIANT"
  options: |
    COMMENT = $$Issue a PUT request to the GitHub REST API at `route`, sending `data` as the JSON request body. [Docs](https://docs.github.com/en/rest?apiVersion=2022-11-28).$$
  sql: |
    SELECT
      live.udf_api(
        'PUT',
        CONCAT_WS('/', 'https://api.github.com', route),
        PARSE_JSON({{ schema_name -}}.headers()),
        data,
        '_FSC_SYS/GITHUB'
      )
{% endmacro %}
|
||||
@ -1,2 +0,0 @@
|
||||
{% set config = config_core__live %}
|
||||
{{ ephemeral_deploy_core(config) }}
|
||||
@ -1,2 +0,0 @@
|
||||
{% set config = config_core__utils %}
|
||||
{{ ephemeral_deploy_core(config) }}
|
||||
@ -1,11 +0,0 @@
|
||||
version: 2
|
||||
models:
|
||||
- name: _utils
|
||||
columns:
|
||||
- name: udf_introspect
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test___utils_udf_introspect
|
||||
args: "'hello'"
|
||||
assertions:
|
||||
- result = 'hello'
|
||||
@ -1,5 +0,0 @@
|
||||
-- depends_on: {{ ref('_utils') }}
|
||||
-- depends_on: {{ ref('utils') }}
|
||||
-- depends_on: {{ ref('_live') }}
|
||||
{% set config = config_core_live %}
|
||||
{{ ephemeral_deploy_core(config) }}
|
||||
@ -1,4 +0,0 @@
|
||||
-- depends_on: {{ ref('_utils') }}
|
||||
-- depends_on: {{ ref('live') }}
|
||||
{% set config = config_core_secrets %}
|
||||
{{ ephemeral_deploy_core(config) }}
|
||||
@ -1,45 +0,0 @@
|
||||
version: 2
|
||||
models:
|
||||
- name: secrets
|
||||
columns:
|
||||
- name: udf_register_secret
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_secrets__udf_register_secret
|
||||
args: >
|
||||
'test', 'test'
|
||||
assertions:
|
||||
- result = {'request_id':'test','success':false}
|
||||
- name: udf_get_secret
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_secrets__udf_get_secret
|
||||
args: >
|
||||
'test'
|
||||
assertions:
|
||||
- >
|
||||
result::OBJECT = {'key': 'value'}
|
||||
- name: udf_get_secrets
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_secrets__udf_get_secrets
|
||||
args: ''
|
||||
assertions:
|
||||
- >
|
||||
result = {'test': {'key': 'value'}}
|
||||
- name: udf_create_secret
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_secrets__udf_create_secret
|
||||
args: >
|
||||
'test', {'key': 'value'}
|
||||
assertions:
|
||||
- result = 200
|
||||
- name: udf_delete_secret
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_secrets__udf_delete_secret
|
||||
args: >
|
||||
'test_delete'
|
||||
assertions:
|
||||
- result = 200
|
||||
@ -1,3 +0,0 @@
|
||||
-- depends_on: {{ ref('_utils') }}
|
||||
{% set config = config_core_utils %}
|
||||
{{ ephemeral_deploy_core(config) }}
|
||||
@ -1,240 +0,0 @@
|
||||
version: 2
|
||||
models:
|
||||
- name: utils
|
||||
columns:
|
||||
- name: udf_json_rpc_call
|
||||
tests:
|
||||
- test_udf:
|
||||
args: "'foo', [], 1"
|
||||
assertions:
|
||||
- >
|
||||
result = {
|
||||
'jsonrpc': '2.0',
|
||||
'method': 'foo',
|
||||
'params': [],
|
||||
'id': '1'
|
||||
}
|
||||
- name: udf_urlencode
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_true_1
|
||||
args: >
|
||||
{'a':'b'}, TRUE
|
||||
assertions:
|
||||
- result = 'a=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_true_2
|
||||
args: >
|
||||
{'a':'a', 'b':'b'}, TRUE
|
||||
assertions:
|
||||
- result = 'a=a&b=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_true_space
|
||||
args: >
|
||||
{'space': 'abc 123'}, TRUE
|
||||
assertions:
|
||||
- result = 'space=abc+123'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_true_special
|
||||
args: >
|
||||
{'special!': ' !@#$,+"'}, TRUE
|
||||
assertions:
|
||||
- result = 'special%21=+%21%40%23%24%2C%2B%22'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_true_array
|
||||
args: >
|
||||
{'array': [0, 1, 2]}, TRUE
|
||||
assertions:
|
||||
- result = 'array=0&array=1&array=2'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_false_1
|
||||
args: >
|
||||
{'a':'b'}, FALSE
|
||||
assertions:
|
||||
- result = 'a=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_false_2
|
||||
args: >
|
||||
{'a':'b', 'b':'b'}, FALSE
|
||||
assertions:
|
||||
- result = 'a=b&b=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_false_space
|
||||
args: >
|
||||
{'space': 'abc 123'}, FALSE
|
||||
assertions:
|
||||
- result = 'space=abc+123'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_false_special
|
||||
args: >
|
||||
{'special!': ' !@#$,+"'}, FALSE
|
||||
assertions:
|
||||
- result = 'special%21=+%21%40%23%24%2C%2B%22'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_false_array
|
||||
args: >
|
||||
{'array': [0, 1, 2]}, FALSE
|
||||
assertions:
|
||||
- result = 'array=%5B0%2C+1%2C+2%5D'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_1
|
||||
args: >
|
||||
{'a':'b'}
|
||||
assertions:
|
||||
- result = 'a=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_2
|
||||
args: >
|
||||
{'a':'b', 'b':'b'}
|
||||
assertions:
|
||||
- result = 'a=b&b=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_space
|
||||
args: >
|
||||
{'space': 'abc 123'}
|
||||
assertions:
|
||||
- result = 'space=abc+123'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_special
|
||||
args: >
|
||||
{'special!': ' !@#$,+"'}
|
||||
assertions:
|
||||
- result = 'special%21=+%21%40%23%24%2C%2B%22'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_dict_array
|
||||
args: >
|
||||
{'array': [0, 1, 2]}
|
||||
assertions:
|
||||
- result = 'array=%5B0%2C+1%2C+2%5D'
|
||||
# write tests but use arrays of arrays instead of dictionaries
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_true_1
|
||||
args: >
|
||||
[['a', 'b']], TRUE
|
||||
assertions:
|
||||
- result = 'a=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_true_2
|
||||
args: >
|
||||
[['a', 'a'], ['b', 'b']], TRUE
|
||||
assertions:
|
||||
- result = 'a=a&b=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_true_space
|
||||
args: >
|
||||
[['space', 'abc 123']], TRUE
|
||||
assertions:
|
||||
- result = 'space=abc+123'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_true_special
|
||||
args: >
|
||||
[['special!', ' !@#$,+"']], TRUE
|
||||
assertions:
|
||||
- result = 'special%21=+%21%40%23%24%2C%2B%22'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_true_array
|
||||
args: >
|
||||
[['array', [0, 1, 2]]], TRUE
|
||||
assertions:
|
||||
- result = 'array=0&array=1&array=2'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_false_1
|
||||
args: >
|
||||
[['a', 'b']], FALSE
|
||||
assertions:
|
||||
- result = 'a=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_false_2
|
||||
args: >
|
||||
[['a', 'a'], ['b', 'b']], FALSE
|
||||
assertions:
|
||||
- result = 'a=a&b=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_false_space
|
||||
args: >
|
||||
[['space', 'abc 123']], FALSE
|
||||
assertions:
|
||||
- result = 'space=abc+123'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_false_special
|
||||
args: >
|
||||
[['special!', ' !@#$,+"']], FALSE
|
||||
assertions:
|
||||
- result = 'special%21=+%21%40%23%24%2C%2B%22'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_false_array
|
||||
args: >
|
||||
[['array', [0, 1, 2]]], FALSE
|
||||
assertions:
|
||||
- result = 'array=%5B0%2C+1%2C+2%5D'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_1
|
||||
args: >
|
||||
[['a', 'b']]
|
||||
assertions:
|
||||
- result = 'a=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_2
|
||||
args: >
|
||||
[['a', 'a'], ['b', 'b']]
|
||||
assertions:
|
||||
- result = 'a=a&b=b'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_space
|
||||
args: >
|
||||
[['space', 'abc 123']]
|
||||
assertions:
|
||||
- result = 'space=abc+123'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_special
|
||||
args: >
|
||||
[['special!', ' !@#$,+"']]
|
||||
assertions:
|
||||
- result = 'special%21=+%21%40%23%24%2C%2B%22'
|
||||
- test_udf:
|
||||
name: test_utils__udf_urlencode_array_array
|
||||
args: >
|
||||
[['array', [0, 1, 2]]]
|
||||
assertions:
|
||||
- result = 'array=%5B0%2C+1%2C+2%5D'
|
||||
- name: udf_evm_decode_log
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_utils__udf_evm_decode_log
|
||||
args: >
|
||||
{
|
||||
'anonymous': false,
|
||||
'inputs': [
|
||||
{
|
||||
'indexed': false,
|
||||
'name': 'nonce',
|
||||
'type': 'uint256'
|
||||
}
|
||||
],
|
||||
'name': 'NonceChanged',
|
||||
'type': 'event'
|
||||
}::variant,
|
||||
{
|
||||
'address': '0x55032650b14df07b85bf18a3a3ec8e0af2e028d5',
|
||||
'data': '0x000000000000000000000000000000000000000000000000000000000000279c',
|
||||
'topics': [
|
||||
'0x7220970e1f1f12864ecccd8942690a837c7a8dd45d158cb891eb45a8a69134aa'
|
||||
]
|
||||
}::variant
|
||||
assertions:
|
||||
- >
|
||||
result = [
|
||||
{
|
||||
'address': '0x55032650b14df07b85bF18A3a3eC8E0Af2e028d5',
|
||||
'data': [
|
||||
{
|
||||
'decoded': true,
|
||||
'name': 'nonce',
|
||||
'type': 'uint256',
|
||||
'value': 10140
|
||||
}
|
||||
],
|
||||
'decoded': true,
|
||||
'name': 'NonceChanged'
|
||||
}
|
||||
]
|
||||
@ -1,7 +0,0 @@
|
||||
-- depends_on: {{ ref('live') }}
|
||||
-- depends_on: {{ ref('github_utils__github_utils') }}
|
||||
{%- set configs = [
|
||||
config_github_actions_udfs,
|
||||
config_github_actions_udtfs,
|
||||
] -%}
|
||||
{{- ephemeral_deploy_marketplace(configs) -}}
|
||||
@ -1,37 +0,0 @@
|
||||
version: 2
|
||||
models:
|
||||
- name: github_actions__github_utils
|
||||
columns:
|
||||
- name: workflows
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_github_actions__workflows_status_200
|
||||
args: >
|
||||
'FlipsideCrypto',
|
||||
'admin-models'
|
||||
assertions:
|
||||
- result:status_code = 200
|
||||
- result:error IS NULL
|
||||
- name: runs
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_github_actions__runs_status_200
|
||||
args: >
|
||||
'FlipsideCrypto',
|
||||
'admin-models',
|
||||
{}
|
||||
assertions:
|
||||
- result:status_code = 200
|
||||
- result:error IS NULL
|
||||
- name: workflow_runs
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_github_actions__workflow_runs_status_200
|
||||
args: >
|
||||
'FlipsideCrypto',
|
||||
'admin-models',
|
||||
'dbt_run_dev_refresh.yml',
|
||||
{}
|
||||
assertions:
|
||||
- result:status_code = 200
|
||||
- result:error IS NULL
|
||||
@ -1,5 +0,0 @@
|
||||
-- depends_on: {{ ref('live') }}
|
||||
{%- set configs = [
|
||||
config_github_utils_udfs,
|
||||
] -%}
|
||||
{{- ephemeral_deploy_marketplace(configs) -}}
|
||||
@ -1,11 +0,0 @@
|
||||
version: 2
|
||||
models:
|
||||
- name: github_utils__github_utils
|
||||
columns:
|
||||
- name: octocat
|
||||
tests:
|
||||
- test_udf:
|
||||
name: test_github_utils__octocat_status_200
|
||||
assertions:
|
||||
- result:status_code = 200
|
||||
- result:error IS NULL
|
||||
@ -6,6 +6,6 @@ packages:
|
||||
- package: dbt-labs/dbt_utils
|
||||
version: 1.0.0
|
||||
- git: https://github.com/FlipsideCrypto/fsc-utils.git
|
||||
revision: v1.6.2
|
||||
revision: v1.8.0
|
||||
- package: get-select/dbt_snowflake_query_tags
|
||||
version: [">=2.0.0", "<3.0.0"]
|
||||
Loading…
Reference in New Issue
Block a user