Compare commits

...

32 Commits

Author SHA1 Message Date
Austin
a363429861
Merge pull request #70 from FlipsideCrypto/fix/udf-compile-error
Fix/udf compile error
2025-12-10 13:50:06 -05:00
Austin
f0ef82c39a int to hex 2025-12-10 13:42:54 -05:00
Austin
e84c7f4601 db 2025-12-10 13:38:21 -05:00
Austin
4dd7280484 move comments to read me 2025-12-10 13:34:41 -05:00
Austin
1036b6833a remove comments 2025-12-10 13:29:48 -05:00
Austin
a91069c976 updates 2025-12-10 13:28:43 -05:00
Austin
c9a5b819c8
Merge pull request #69 from FlipsideCrypto/DAT2-195/read-functions
Dat2 195/read functions
2025-12-10 13:19:47 -05:00
Austin
d0e3f57772 format 2025-12-10 13:15:16 -05:00
Austin
70d5fc1c3e updates 2025-12-10 12:58:54 -05:00
Austin
027f73276c dbs 2025-12-09 13:36:48 -05:00
Austin
6981b8b42d db 2025-12-09 13:31:48 -05:00
Austin
1fd0466311 comment 2025-12-09 13:28:53 -05:00
Austin
b0e51e2b4d updates 2025-12-09 13:24:24 -05:00
drethereum
3697967c46
Merge pull request #68 from FlipsideCrypto/addnew/decompress-zlib-udf
addnew/decompress-zlib-udf
2025-11-04 13:45:28 -07:00
drethereum
70e238a548 closing macro 2025-11-04 12:33:39 -07:00
drethereum
4317c353a5 merge 2025-10-22 17:12:17 -06:00
drethereum
3985d78199 udf 2025-10-22 16:54:55 -06:00
Jensen Yap
6415fc4873
Merge pull request #66 from FlipsideCrypto/hotfix/upgrade-livequery-models-bug
Upgrade livequery models revision to v1.10.2
2025-08-19 11:33:25 +09:00
Jensen Yap
de65b99f86 Upgrade livequery models revision to v1.10.2 2025-08-16 02:34:40 +09:00
Jensen Yap
45fcf86aea
Merge pull request #65 from FlipsideCrypto/STREAM-1324/upgrade-livequery-version 2025-08-13 02:29:50 +09:00
Jensen Yap
ef0f0deec0 update to 1.10.1 2025-08-13 02:03:12 +09:00
Jensen Yap
76b46b9026 update livequery models revision to v1.10.0 2025-08-08 14:06:49 +09:00
Jensen Yap
4799e897e1 fix branch name 2025-08-08 13:56:37 +09:00
Jensen Yap
7b6feb4a40 update 2025-08-08 13:55:55 +09:00
Jensen Yap
36dab6002f bump version to 1.10.0 2025-08-08 12:20:27 +09:00
Jensen Yap
a0672aff35 update livequery models version 2025-08-08 01:21:33 +09:00
Jensen Yap
88e94f5160 update 2025-08-07 23:39:45 +09:00
Jensen Yap
567b311ca8 upgrade livequery models version 2025-08-07 16:13:09 +09:00
Matt Romano
3def5e5c44
Merge pull request #64 from FlipsideCrypto/add-coingecko-stablecoin-parse-udf
add-coingecko-stablecoin-parse-udf
2025-07-30 12:30:30 -07:00
mattromano
cfc2c69de8 add stable coin parse udf configs and logic 2025-07-30 11:10:13 -07:00
Austin
957f7252ab
Merge pull request #63 from FlipsideCrypto/bump-lq-models-1-9
livequery models update
2025-06-12 12:53:40 -04:00
Austin
d5a43b13ef livequery models update 2025-06-12 12:26:49 -04:00
4 changed files with 738 additions and 7 deletions

116
README.md
View File

@@ -159,6 +159,122 @@ The `fsc_utils` dbt package is a centralized repository consisting of various db
```
- `utils.udf_encode_contract_call`: Encodes EVM contract function calls into ABI-encoded calldata format for eth_call RPC requests. Handles all Solidity types including tuples and arrays.
```
-- Simple function with no inputs
SELECT utils.udf_encode_contract_call(
PARSE_JSON('{"name": "totalSupply", "inputs": []}'),
ARRAY_CONSTRUCT()
);
-- Returns: 0x18160ddd
-- Function with single address parameter
SELECT utils.udf_encode_contract_call(
PARSE_JSON('{
"name": "balanceOf",
"inputs": [{"name": "account", "type": "address"}]
}'),
ARRAY_CONSTRUCT('0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48')
);
-- Returns: 0x70a08231000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48
-- Function with multiple parameters
SELECT utils.udf_encode_contract_call(
PARSE_JSON('{
"name": "transfer",
"inputs": [
{"name": "to", "type": "address"},
{"name": "amount", "type": "uint256"}
]
}'),
ARRAY_CONSTRUCT('0x1234567890123456789012345678901234567890', 1000000)
);
-- Complex function with nested tuples
SELECT utils.udf_encode_contract_call(
PARSE_JSON('{
"name": "swap",
"inputs": [{
"name": "params",
"type": "tuple",
"components": [
{"name": "tokenIn", "type": "address"},
{"name": "tokenOut", "type": "address"},
{"name": "amountIn", "type": "uint256"}
]
}]
}'),
ARRAY_CONSTRUCT(
ARRAY_CONSTRUCT(
'0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
'0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
1000000
)
)
);
```
- `utils.udf_create_eth_call`: Creates an eth_call JSON-RPC request object from contract address and encoded calldata. Supports block parameter as string or number (auto-converts numbers to hex).
```
-- Using default 'latest' block
SELECT utils.udf_create_eth_call(
'0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
'0x70a08231000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48'
);
-- Using specific block number (auto-converted to hex)
SELECT utils.udf_create_eth_call(
'0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
'0x70a08231000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
18500000
);
```
- `utils.udf_create_eth_call_from_abi`: Convenience function that combines contract call encoding and JSON-RPC request creation in a single call. Recommended for most use cases.
```
-- Simple balanceOf call with default 'latest' block
SELECT utils.udf_create_eth_call_from_abi(
'0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
PARSE_JSON('{
"name": "balanceOf",
"inputs": [{"name": "account", "type": "address"}]
}'),
ARRAY_CONSTRUCT('0xbcca60bb61934080951369a648fb03df4f96263c')
);
-- Same call but at a specific block number
SELECT utils.udf_create_eth_call_from_abi(
'0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
PARSE_JSON('{
"name": "balanceOf",
"inputs": [{"name": "account", "type": "address"}]
}'),
ARRAY_CONSTRUCT('0xbcca60bb61934080951369a648fb03df4f96263c'),
18500000
);
-- Using ABI from a table
WITH abi_data AS (
SELECT
abi,
'0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48' as contract_address,
'0xbcca60bb61934080951369a648fb03df4f96263c' as user_address
FROM ethereum.silver.flat_function_abis
WHERE contract_address = LOWER('0x43506849d7c04f9138d1a2050bbf3a0c054402dd')
AND function_name = 'balanceOf'
)
SELECT
utils.udf_create_eth_call_from_abi(
contract_address,
abi,
ARRAY_CONSTRUCT(user_address)
) as rpc_call
FROM abi_data;
```
## **Streamline V 2.0 Functions**
The `Streamline V 2.0` functions are a set of macros and UDFs that are designed to be used with `Streamline V 2.0` deployments.

View File

@@ -30,6 +30,18 @@
sql: |
{{ fsc_utils.python_udf_hex_to_int_with_encoding() | indent(4) }}
- name: {{ schema }}.udf_int_to_hex
  # Converts a NUMBER into a '0x'-prefixed hexadecimal string, e.g. 255 -> '0xFF'.
  # NOTE(review): TO_CHAR with the 'X...' picture yields UPPERCASE hex digits and the
  # 38-X format width bounds the representable magnitude — confirm downstream RPC
  # consumers accept uppercase (EVM hex is conventionally lowercase).
  signature:
    - [int, NUMBER]
  return_type: VARCHAR(16777216)
  options: |
    NULL
    LANGUAGE SQL
    STRICT IMMUTABLE
  sql: |
    SELECT CONCAT('0x', TRIM(TO_CHAR(int, 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX')))
- name: {{ schema }}.udf_hex_to_string
signature:
- [hex, STRING]
@@ -254,5 +266,144 @@
sql: |
{{ fsc_utils.create_udtf_flatten_overflowed_responses() | indent(4) }}
- name: {{ schema }}.udf_decompress_zlib
  # Python UDF; handler body is generated by fsc_utils.create_udf_decompress_zlib().
  # Takes a str() rendering of compressed bytes (e.g. "b'x\x9c...'") and returns the
  # decompressed UTF-8 text.
  signature:
    - [compressed_string, STRING]
  return_type: STRING
  options: |
    LANGUAGE PYTHON
    RUNTIME_VERSION = '3.10'
    COMMENT = 'Decompresses zlib/deflate-compressed data from Python bytes literal string format'
    HANDLER = 'decompress_zlib'
  sql: |
    {{ fsc_utils.create_udf_decompress_zlib() | indent(4) }}
- name: {{ schema }}.udf_stablecoin_data_parse
  # Python UDTF; handler body is generated by fsc_utils.create_udf_stablecoin_data_parse().
  # Parses raw peggedData TypeScript source text and emits one row per stablecoin object.
  # NOTE(review): column names below use camelCase (onCoinGecko, pegType, ...) while the
  # rest of the repo uses snake_case — presumably intentional to mirror the upstream
  # field names; confirm.
  signature:
    - [peggeddata_content, STRING]
  return_type: |
    TABLE (
      id STRING,
      name STRING,
      address STRING,
      symbol STRING,
      onCoinGecko BOOLEAN,
      gecko_id STRING,
      cmcId STRING,
      pegType STRING,
      pegMechanism STRING,
      priceSource STRING,
      deadFrom STRING,
      delisted BOOLEAN,
      deprecated BOOLEAN,
      doublecounted BOOLEAN
    )
  options: |
    LANGUAGE PYTHON
    RUNTIME_VERSION = '3.10'
    HANDLER = 'udf_stablecoin_data_parse'
  sql: |
    {{ fsc_utils.create_udf_stablecoin_data_parse() | indent(4) }}
- name: {{ schema }}.udf_encode_contract_call
  # Python UDF; handler body is generated by fsc_utils.create_udf_encode_contract_call().
  # Builds selector + ABI-encoded params from a function ABI (VARIANT) and input values.
  signature:
    - [function_abi, VARIANT]
    - [input_values, ARRAY]
  return_type: STRING
  options: |
    LANGUAGE PYTHON
    RUNTIME_VERSION = '3.10'
    PACKAGES = ('eth-abi')
    HANDLER = 'encode_call'
    COMMENT = 'Encodes EVM contract function calls into ABI-encoded calldata format for eth_call RPC requests. Handles all Solidity types including tuples and arrays.'
  sql: |
    {{ fsc_utils.create_udf_encode_contract_call() | indent(4) }}
- name: {{ schema }}.udf_create_eth_call
  # Two-argument overload: wraps (contract, calldata) into an eth_call JSON-RPC
  # request object pinned to the 'latest' block. Delegates to udf_json_rpc_call.
  # STRICT: any NULL argument makes the whole call return NULL without evaluating.
  signature:
    - [contract_address, STRING]
    - [encoded_calldata, STRING]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    STRICT IMMUTABLE
    COMMENT = 'Creates an eth_call JSON-RPC request object with default block parameter "latest".'
  sql: |
    {{ schema }}.udf_json_rpc_call(
      'eth_call',
      ARRAY_CONSTRUCT(
        OBJECT_CONSTRUCT(
          'to', contract_address,
          'data', encoded_calldata
        ),
        'latest'
      )
    )
- name: {{ schema }}.udf_create_eth_call
  # Three-argument overload: same as above but with an explicit block parameter.
  # Numeric block parameters are converted to hex via {{ schema }}.udf_int_to_hex.
  # NOTE(review): because the function is declared STRICT, a NULL block_parameter
  # returns NULL before the CASE runs, so the `IS NULL THEN 'latest'` branch is
  # unreachable — confirm intent; the two-argument overload already covers the
  # default-'latest' case.
  signature:
    - [contract_address, STRING]
    - [encoded_calldata, STRING]
    - [block_parameter, VARIANT]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    STRICT IMMUTABLE
    COMMENT = 'Creates an eth_call JSON-RPC request object. Accepts contract address, encoded calldata, and optional block parameter (string or number). If block_parameter is a number, it will be converted to hex format using udf_int_to_hex.'
  sql: |
    {{ schema }}.udf_json_rpc_call(
      'eth_call',
      ARRAY_CONSTRUCT(
        OBJECT_CONSTRUCT(
          'to', contract_address,
          'data', encoded_calldata
        ),
        CASE
          WHEN block_parameter IS NULL THEN 'latest'
          WHEN TYPEOF(block_parameter) IN ('INTEGER', 'NUMBER', 'FIXED', 'FLOAT') THEN
            {{ schema }}.udf_int_to_hex(block_parameter::NUMBER)
          ELSE block_parameter::STRING
        END
      )
    )
- name: {{ schema }}.udf_create_eth_call_from_abi
  # Convenience overload: encode the calldata from the ABI, then build the RPC
  # request at block 'latest' by delegating to the two-argument udf_create_eth_call.
  signature:
    - [contract_address, STRING]
    - [function_abi, VARIANT]
    - [input_values, ARRAY]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    STRICT IMMUTABLE
    COMMENT = 'Convenience function that combines contract call encoding and JSON-RPC request creation for eth_call. Encodes function call from ABI and creates RPC request with default block parameter "latest".'
  sql: |
    {{ schema }}.udf_create_eth_call(
      contract_address,
      {{ schema }}.udf_encode_contract_call(function_abi, input_values)
    )
- name: {{ schema }}.udf_create_eth_call_from_abi
  # Convenience overload with an explicit block parameter; delegates to the
  # three-argument udf_create_eth_call, which handles number-to-hex conversion.
  signature:
    - [contract_address, STRING]
    - [function_abi, VARIANT]
    - [input_values, ARRAY]
    - [block_parameter, VARIANT]
  return_type: OBJECT
  options: |
    NULL
    LANGUAGE SQL
    STRICT IMMUTABLE
    COMMENT = 'Convenience function that combines contract call encoding and JSON-RPC request creation for eth_call. Encodes function call from ABI and creates RPC request with specified block parameter.'
  sql: |
    {{ schema }}.udf_create_eth_call(
      contract_address,
      {{ schema }}.udf_encode_contract_call(function_abi, input_values),
      block_parameter
    )
{% endmacro %}

View File

@@ -540,4 +540,473 @@ class FlattenRows:
cleansed["index_cols"] = cleansed[temp_index_cols].apply(list, axis=1)
cleansed.drop(columns=temp_index_cols, inplace=True, errors="ignore")
return list(cleansed[np.roll(cleansed.columns.values, 1).tolist()].itertuples(index=False, name=None))
{% endmacro %}
{% endmacro %}
{% macro create_udf_decompress_zlib() %}
import zlib
import codecs
def decompress_zlib(compressed_string):
    """Decompress a zlib/deflate payload stored as a Python bytes-literal string.

    The input is typically the str() rendering of compressed bytes, e.g.
    "b'x\u005cx9c...'"; the b-prefix/quotes are stripped if present.

    Returns the decompressed payload decoded as UTF-8, None for empty input,
    or an "Error decompressing: ..." message string on failure — the UDF never
    raises, so one bad row does not fail a whole query.
    """
    try:
        if not compressed_string:
            return None
        # Remove b prefix and suffix if present
        if compressed_string.startswith("b'") and compressed_string.endswith("'"):
            compressed_string = compressed_string[2:-1]
        elif compressed_string.startswith('b"') and compressed_string.endswith('"'):
            compressed_string = compressed_string[2:-1]
        # Decode the escaped string to bytes.
        # unicode_escape turns textual escapes like \x9c back into real code points.
        compressed_bytes = codecs.decode(compressed_string, 'unicode_escape')
        # Convert to bytes if string — latin-1 maps each code point 0-255 to the
        # identical byte value, recovering the original compressed byte stream.
        if isinstance(compressed_bytes, str):
            compressed_bytes = compressed_bytes.encode('latin-1')
        # Decompress the zlib data
        decompressed = zlib.decompress(compressed_bytes)
        # Return as UTF-8 string
        return decompressed.decode('utf-8')
    except Exception as e:
        return f"Error decompressing: {str(e)}"
{% endmacro %}
{% macro create_udf_stablecoin_data_parse() %}
import re
class udf_stablecoin_data_parse:
    # Snowflake Python UDTF handler. Parses the raw TypeScript source of a
    # peggedData-style file ("export default [ {...}, ... ]") with regexes —
    # the content is TS, not JSON, so it cannot be json.loads()'d — and yields
    # one 14-column row per stablecoin object (see the table return_type in
    # the UDF definition).
    def process(self, peggeddata_content):
        """Main parsing function"""
        def extract_field_value(obj_text, field_name):
            """Extract field value from object text using regex patterns"""
            # Handle different field patterns
            # NOTE(review): patterns are unanchored, so searching for 'id' can
            # also match the tail of 'gecko_id:' or the 'Id' in 'cmcId:' if they
            # appear first in obj_text — confirm field ordering makes this safe.
            patterns = [
                rf'{field_name}\s*:\s*"([^"]*)"',
                rf"{field_name}\s*:\s*'([^']*)'",
                rf'{field_name}\s*:\s*`([^`]*)`',
                rf'{field_name}\s*:\s*(true|false|null|undefined)',
                rf'{field_name}\s*:\s*([^,}}\n]+)'
            ]
            for pattern in patterns:
                match = re.search(pattern, obj_text, re.IGNORECASE | re.DOTALL)
                if match:
                    value = match.group(1).strip()
                    # Clean up the value
                    value = re.sub(r'[,}}\n]', '', value).strip()
                    if value.lower() in ('null', 'undefined', ''):
                        return None
                    # Handle boolean values
                    if value.lower() == 'true':
                        return True
                    if value.lower() == 'false':
                        return False
                    return value
            return None
        def convert_value(value, expected_type):
            """Convert value to appropriate type"""
            if value is None:
                return None
            if expected_type == 'BOOLEAN':
                if isinstance(value, bool):
                    return value
                if isinstance(value, str):
                    lower = value.lower()
                    if lower == 'true':
                        return True
                    if lower == 'false':
                        return False
                return None
            # Everything else is surfaced as a string column.
            return str(value) if value is not None else None
        try:
            # Find the main array content - make the regex non-greedy but capture everything
            array_match = re.search(r'export\s+default\s*\[(.*)\];?\s*$', peggeddata_content, re.DOTALL)
            if not array_match:
                raise Exception('Could not find exported array in peggedData content')
            array_content = array_match.group(1).strip()
            # Use a simpler regex-based approach to split objects
            # Remove comments and clean up the array content first
            # Instead of removing line comments entirely, just remove the // markers but keep the content
            clean_content = re.sub(r'^\s*//\s*', '', array_content, flags=re.MULTILINE) # Remove // at start of lines
            clean_content = re.sub(r'\n\s*//\s*', '\n', clean_content) # Remove // from middle of lines
            # Instead of removing block comments entirely, just remove the comment markers but keep the content
            clean_content = re.sub(r'/\*', '', clean_content) # Remove opening block comment markers
            clean_content = re.sub(r'\*/', '', clean_content) # Remove closing block comment markers
            # Find all objects using regex - look for {...} patterns
            # This is more reliable than manual parsing
            # Attempt 1: flat regex matching one level of brace nesting.
            object_pattern = r'\{[^{}]*(?:\{[^{}]*\}[^{}]*)*\}'
            matches = re.finditer(object_pattern, clean_content, re.DOTALL)
            objects = []
            for match in matches:
                obj_text = match.group(0).strip()
                if obj_text and len(obj_text) > 10: # Filter out small matches
                    objects.append(obj_text)
            # If the simple regex didn't work, try a more complex nested approach
            if not objects:
                # More complex regex for nested objects (attempt 2)
                nested_pattern = r'\{(?:[^{}]|(?:\{[^{}]*\}))*\}'
                nested_matches = re.findall(nested_pattern, clean_content, re.DOTALL)
                objects = [obj.strip() for obj in nested_matches if len(obj.strip()) > 20]
            # Still no objects? Try manual parsing with better logic (attempt 3:
            # character-by-character brace matching that skips string literals)
            if not objects:
                objects = []
                current_object = ''
                brace_count = 0
                in_string = False
                string_char = ''
                i = 0
                while i < len(clean_content):
                    char = clean_content[i]
                    # Handle string literals
                    if not in_string and char in ('"', "'", '`'):
                        in_string = True
                        string_char = char
                    elif in_string and char == string_char:
                        # Check if it's escaped
                        # NOTE(review): a single look-behind char doesn't handle an
                        # escaped backslash before the quote ("...\\\\'") — confirm
                        # the source data never contains that sequence.
                        if i > 0 and clean_content[i-1] != '\\':
                            in_string = False
                            string_char = ''
                    # Handle braces only when not in string
                    if not in_string:
                        if char == '{':
                            if brace_count == 0:
                                current_object = '{' # Start new object
                            else:
                                current_object += char
                            brace_count += 1
                        elif char == '}':
                            current_object += char
                            brace_count -= 1
                            if brace_count == 0 and current_object.strip():
                                # Complete object found
                                objects.append(current_object.strip())
                                current_object = ''
                        elif brace_count > 0:
                            current_object += char
                    else:
                        if brace_count > 0:
                            current_object += char
                    i += 1
            if not objects:
                # Last resort (attempt 4): try splitting on id: pattern
                id_splits = re.split(r'\n\s*id:\s*["\']', clean_content)
                if len(id_splits) > 1:
                    objects = []
                    for i, part in enumerate(id_splits[1:], 1): # Skip first empty part
                        # Try to reconstruct the object
                        obj_start = clean_content.find(f'id:', clean_content.find(part))
                        if obj_start > 0:
                            # Look backwards for opening brace
                            brace_start = clean_content.rfind('{', 0, obj_start)
                            if brace_start >= 0:
                                # Look forward for matching closing brace
                                brace_count = 0
                                for j in range(brace_start, len(clean_content)):
                                    if clean_content[j] == '{':
                                        brace_count += 1
                                    elif clean_content[j] == '}':
                                        brace_count -= 1
                                        if brace_count == 0:
                                            obj_text = clean_content[brace_start:j+1].strip()
                                            if len(obj_text) > 20:
                                                objects.append(obj_text)
                                            break
            if not objects:
                raise Exception(f'No objects found after all parsing attempts. Sample content: {clean_content[:500]}...')
            # Process each object and extract the required fields
            for i, obj_text in enumerate(objects):
                try:
                    data = {
                        'id': extract_field_value(obj_text, 'id'),
                        'name': extract_field_value(obj_text, 'name'),
                        'address': extract_field_value(obj_text, 'address'),
                        'symbol': extract_field_value(obj_text, 'symbol'),
                        'onCoinGecko': extract_field_value(obj_text, 'onCoinGecko'),
                        'gecko_id': extract_field_value(obj_text, 'gecko_id'),
                        'cmcId': extract_field_value(obj_text, 'cmcId'),
                        'pegType': extract_field_value(obj_text, 'pegType'),
                        'pegMechanism': extract_field_value(obj_text, 'pegMechanism'),
                        'priceSource': extract_field_value(obj_text, 'priceSource'),
                        'deadFrom': extract_field_value(obj_text, 'deadFrom'),
                        'delisted': extract_field_value(obj_text, 'delisted'),
                        'deprecated': extract_field_value(obj_text, 'deprecated'),
                        'doublecounted': extract_field_value(obj_text, 'doublecounted')
                    }
                    # Only include objects that have at least id and name
                    if data['id'] and data['name']:
                        yield (
                            convert_value(data['id'], 'STRING'),
                            convert_value(data['name'], 'STRING'),
                            convert_value(data['address'], 'STRING'),
                            convert_value(data['symbol'], 'STRING'),
                            convert_value(data['onCoinGecko'], 'BOOLEAN'),
                            convert_value(data['gecko_id'], 'STRING'),
                            convert_value(data['cmcId'], 'STRING'),
                            convert_value(data['pegType'], 'STRING'),
                            convert_value(data['pegMechanism'], 'STRING'),
                            convert_value(data['priceSource'], 'STRING'),
                            convert_value(data['deadFrom'], 'STRING'),
                            convert_value(data['delisted'], 'BOOLEAN'),
                            convert_value(data['deprecated'], 'BOOLEAN'),
                            convert_value(data['doublecounted'], 'BOOLEAN')
                        )
                except Exception as obj_error:
                    # Skip malformed objects but continue processing
                    continue
        except Exception as error:
            raise Exception(f'Error parsing peggedData content: {str(error)}')
{% endmacro %}
{% macro create_udf_encode_contract_call() %}
def encode_call(function_abi, input_values):
    """
    Encodes EVM contract function calls into ABI-encoded calldata.
    This function generates complete calldata (selector + encoded params) that can be
    used directly in eth_call JSON-RPC requests to query contract state.

    Args:
        function_abi: dict with 'name' and 'inputs' (standard Solidity ABI entry).
        input_values: positional list of argument values; missing trailing values
            are filled with per-type defaults (see prepare_value).

    Returns:
        '0x'-prefixed hex calldata string on success; on ANY failure, a JSON string
        describing the error (callers must check for that, the UDF never raises).
    """
    import eth_abi
    from eth_hash.auto import keccak
    import json
    def get_function_signature(abi):
        """
        Generate function signature using the same logic as utils.udf_evm_text_signature.
        Examples:
            balanceOf(address)
            transfer(address,uint256)
            swap((address,address,uint256))
        """
        def generate_signature(inputs):
            signature_parts = []
            for input_data in inputs:
                if 'components' in input_data:
                    # Handle nested tuples
                    # NOTE(review): a tuple with an empty 'components' list would
                    # IndexError here; the outer try converts that to a JSON error.
                    component_signature_parts = []
                    components = input_data['components']
                    component_signature_parts.extend(generate_signature(components))
                    component_signature_parts[-1] = component_signature_parts[-1].rstrip(",")
                    if input_data['type'].endswith('[]'):
                        signature_parts.append("(" + "".join(component_signature_parts) + ")[],")
                    else:
                        signature_parts.append("(" + "".join(component_signature_parts) + "),")
                else:
                    # Clean up Solidity-specific modifiers
                    signature_parts.append(input_data['type'].replace('enum ', '').replace(' payable', '') + ",")
            return signature_parts
        signature_parts = [abi['name'] + "("]
        signature_parts.extend(generate_signature(abi.get('inputs', [])))
        if len(signature_parts) > 1:
            # Drop the trailing comma left by the last parameter.
            signature_parts[-1] = signature_parts[-1].rstrip(",") + ")"
        else:
            # No inputs: close the empty parameter list.
            signature_parts.append(")")
        return "".join(signature_parts)
    def function_selector(abi):
        """Calculate 4-byte function selector using Keccak256 hash."""
        signature = get_function_signature(abi)
        hash_bytes = keccak(signature.encode('utf-8'))
        # Lowercase hex, no 0x prefix; the signature is returned too for error reporting.
        return hash_bytes[:4].hex(), signature
    def get_canonical_type(input_spec):
        """
        Convert ABI input spec to canonical type string for eth_abi encoding.
        Handles tuple expansion: tuple -> (address,uint256,bytes)
        """
        param_type = input_spec['type']
        if param_type.startswith('tuple'):
            components = input_spec.get('components', [])
            component_types = ','.join([get_canonical_type(comp) for comp in components])
            canonical = f"({component_types})"
            # Preserve array suffixes: tuple[] -> (address,uint256)[]
            if param_type.endswith('[]'):
                array_suffix = param_type[5:] # Everything after 'tuple'
                canonical += array_suffix
            return canonical
        return param_type
    def prepare_value(value, param_type, components=None):
        """
        Convert Snowflake values to Python types suitable for eth_abi encoding.
        Handles type coercion and format normalization for all Solidity types.
        """
        # Handle null/None values with sensible defaults
        if value is None:
            if param_type.startswith('uint') or param_type.startswith('int'):
                return 0
            elif param_type == 'address':
                return '0x' + '0' * 40
            elif param_type == 'bool':
                return False
            elif param_type.startswith('bytes'):
                return b''
            else:
                return value
        # CRITICAL: Check arrays FIRST (before base types)
        # This prevents bytes[] from matching the bytes check
        if param_type.endswith('[]'):
            base_type = param_type[:-2]
            if not isinstance(value, list):
                return []
            # Special handling for tuple arrays
            if base_type == 'tuple' and components:
                return [prepare_tuple(v, components) for v in value]
            else:
                return [prepare_value(v, base_type) for v in value]
        # Base type conversions
        if param_type == 'address':
            addr = str(value).lower()
            if not addr.startswith('0x'):
                addr = '0x' + addr
            return addr
        if param_type.startswith('uint') or param_type.startswith('int'):
            return int(value)
        if param_type == 'bool':
            if isinstance(value, str):
                return value.lower() in ('true', '1', 'yes')
            return bool(value)
        if param_type.startswith('bytes'):
            if isinstance(value, str):
                if value.startswith('0x'):
                    value = value[2:]
                return bytes.fromhex(value)
            return value
        if param_type == 'string':
            return str(value)
        return value
    def prepare_tuple(value, components):
        """
        Recursively prepare tuple values, handling nested structures.
        Tuples can contain other tuples, arrays, or tuple arrays.
        """
        if not isinstance(value, (list, tuple)):
            # Support dict-style input (by component name)
            if isinstance(value, dict):
                value = [value.get(comp.get('name', f'field_{i}'))
                        for i, comp in enumerate(components)]
            else:
                return value
        result = []
        for i, comp in enumerate(components):
            if i >= len(value):
                # Fewer values than components: pad with None (defaulted later).
                result.append(None)
                continue
            comp_type = comp['type']
            val = value[i]
            # Handle tuple arrays within tuples
            if comp_type.endswith('[]') and comp_type.startswith('tuple'):
                sub_components = comp.get('components', [])
                result.append(prepare_value(val, comp_type, sub_components))
            elif comp_type.startswith('tuple'):
                # Single tuple (not array)
                sub_components = comp.get('components', [])
                result.append(prepare_tuple(val, sub_components))
            else:
                result.append(prepare_value(val, comp_type))
        return tuple(result)
    try:
        inputs = function_abi.get('inputs', [])
        # Calculate selector using battle-tested signature generation
        selector_hex, signature = function_selector(function_abi)
        # Functions with no inputs only need the selector
        if not inputs:
            return '0x' + selector_hex
        # Prepare values for encoding
        prepared_values = []
        for i, inp in enumerate(inputs):
            if i >= len(input_values):
                # Missing trailing argument: encode the type's default value.
                prepared_values.append(None)
                continue
            value = input_values[i]
            param_type = inp['type']
            # Handle tuple arrays at top level
            if param_type.endswith('[]') and param_type.startswith('tuple'):
                components = inp.get('components', [])
                prepared_values.append(prepare_value(value, param_type, components))
            elif param_type.startswith('tuple'):
                # Single tuple (not array)
                components = inp.get('components', [])
                prepared_values.append(prepare_tuple(value, components))
            else:
                prepared_values.append(prepare_value(value, param_type))
        # Get canonical type strings for eth_abi (expands tuples)
        types = [get_canonical_type(inp) for inp in inputs]
        # Encode parameters using eth_abi
        encoded_params = eth_abi.encode(types, prepared_values).hex()
        # Return complete calldata: selector + encoded params
        return '0x' + selector_hex + encoded_params
    except Exception as e:
        # Return structured error for debugging
        import traceback
        return json.dumps({
            'error': str(e),
            'traceback': traceback.format_exc(),
            'function': function_abi.get('name', 'unknown'),
            'signature': signature if 'signature' in locals() else 'not computed',
            'selector': '0x' + selector_hex if 'selector_hex' in locals() else 'not computed',
            'types': types if 'types' in locals() else 'not computed'
        })
{% endmacro %}

View File

@@ -1,8 +1,3 @@
packages:
- package: calogica/dbt_expectations
version: [">=0.8.0", "<0.9.0"]
- package: dbt-labs/dbt_utils
version: [">=1.0.0", "<1.1.0"]
- git: https://github.com/FlipsideCrypto/livequery-models.git
revision: "v1.8.0"
revision: "v1.10.2"