Merge pull request #2 from FlipsideCrypto/delete-files

update-files
drethereum 2023-05-23 08:23:07 -06:00 committed by GitHub
commit 22fedea87c
21 changed files with 141 additions and 804 deletions

View File

@@ -1,68 +0,0 @@
name: docs_update
on:
push:
branches:
- "main"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
scheduled_run:
name: docs_update
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
with:
python-version: "3.7.x"
- name: install dependencies
run: |
pip install dbt-snowflake==${{ vars.DBT_VERSION }}
dbt deps
- name: checkout docs branch
run: |
git checkout -b docs origin/main
- name: generate dbt docs
run: dbt docs generate -t prod
- name: move files to docs directory
run: |
mkdir -p ./docs
cp target/{catalog.json,manifest.json,index.html} docs/
- name: clean up target directory
run: dbt clean
- name: check for changes
run: git status
- name: stage changed files
run: git add .
- name: commit changed files
run: |
git config user.email "abc@xyz"
git config user.name "github-actions"
git commit -am "Auto-update docs"
- name: push changes to docs
run: |
git push -f --set-upstream origin docs

View File

@@ -1,68 +0,0 @@
name: dbt_run_adhoc
run-name: dbt_run_adhoc
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
type: choice
description: DBT Run Environment
required: true
options:
- dev
- prod
default: dev
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: string
description: 'DBT Run Command'
required: true
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
strategy:
matrix:
command: ${{fromJson(inputs.dbt_command)}}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
with:
python-version: "3.7.x"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
${{ matrix.command }}

View File

@@ -1,44 +0,0 @@
name: dbt_run_dev_refresh
run-name: dbt_run_dev_refresh
on:
workflow_dispatch:
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: '0 9 * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
with:
python-version: "3.7.x"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
dbt run-operation run_sp_create_prod_clone

README.md
View File

@@ -1,42 +1,6 @@
# LiveQuery Models
# Flipside Utility Functions
Dbt repo for managing LiveQuery database.
## Profile Set Up
Use the following within profiles.yml
```yml
livequery:
target: dev
outputs:
dev:
type: snowflake
account: <ACCOUNT>
role: <ROLE>
user: <USERNAME>
password: <PASSWORD>
region: <REGION>
database: LIVEQUERY_DEV
warehouse: <WAREHOUSE>
schema: silver
threads: 12
client_session_keep_alive: False
query_tag: <TAG>
prod:
type: snowflake
account: <ACCOUNT>
role: <ROLE>
user: <USERNAME>
password: <PASSWORD>
region: <REGION>
database: LIVEQUERY_DEV
warehouse: <WAREHOUSE>
schema: silver
threads: 12
client_session_keep_alive: False
query_tag: <TAG>
```
Dbt repo for managing the Flipside Utility Functions (FSC_UTILS) dbt package.
## Variables
@@ -58,6 +22,63 @@
dbt run-operation create_udfs --vars '{"UPDATE_UDFS_AND_SPS": True}' --args '{"drop_": True}'
```
## Adding the `fsc_utils` dbt package
The `fsc_utils` dbt package is a centralized collection of dbt macros and Snowflake functions that can be used across other repos.
1. Navigate to the `create_udfs.sql` macro in the repo where you want to install the package.
2. Add the following:
```
{% set sql %}
{{- fsc_utils.create_udfs() -}}
{% endset %}
{% do run_query(sql) %}
```
3. Note: `fsc_utils.create_udfs()` takes two optional parameters, `drop_` (default `False`) and `schema` (default `"utils"`). Set `drop_` to `True` to drop existing functions, or pass `schema` to control where the functions are created; neither parameter is required.
4. Navigate to `packages.yml` in your respective repo.
5. Add the following (a fuller `packages.yml` sketch follows these steps):
```
- git: https://github.com/FlipsideCrypto/fsc-utils.git
```
6. Run `dbt deps` to install the package
7. Run the macro `dbt run-operation create_udfs --vars '{"UPDATE_UDFS_AND_SPS": True}'`
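For reference, a fuller `packages.yml` might look like the sketch below. The `revision` value shown is illustrative; pin whichever branch or tagged release of `fsc-utils` your project should track.
```
packages:
  - git: https://github.com/FlipsideCrypto/fsc-utils.git
    revision: main # illustrative; use a tagged release if one is published
```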
### **Overview of Available Functions**
#### **UTILS Functions**
- `utils.udf_hex_to_int`: Use this UDF to transform any hex string to an integer
```
ex: Curve Swaps
SELECT
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
utils.udf_hex_to_int(segmented_data [1] :: STRING) :: INTEGER AS tokens_sold
FROM
optimism.core.fact_event_logs
WHERE
topics [0] :: STRING IN (
'0x8b3e96f2b889fa771c53c981b40daf005f63f637f1869f707052d15a3dd97140',
'0xd013ca23e77a65003c2c659c5442c00c805371b7fc1ebd4c206c41d1536bd90b'
)
```
- `utils.udf_hex_to_string`: Use this UDF to transform any hexadecimal string to a regular string, removing any non-printable or control characters from the resulting string.
```
ex: Token Names
WITH base AS (
SELECT
'0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000005452617265202d204368616e74616c20486167656c202d20576f6d656e2773204575726f2032303232202d2032303232205371756164202d20576f6d656e2773204e6174696f6e616c205465616d202d2032303232000000000000000000000000' AS input_token_name
)
SELECT
utils.udf_hex_to_string(SUBSTR(input_token_name,(64*2+3),LEN(input_token_name))) AS output_token_name
FROM base;
NOTE: The expression 64 * 2 + 3 in the query navigates to the 131st character of the hexadecimal string returned by an EVM blockchain contract's function, skipping metadata and adjusting for Snowflake's 1-based indexing. Keep in mind that the exact start of relevant data may vary between different contracts and functions.
```
## Resources
* Learn more about dbt [in the docs](https://docs.getdbt.com/docs/introduction)
@@ -65,45 +86,3 @@
* Join the [chat](https://community.getdbt.com/) on Slack for live discussions and support
* Find [dbt events](https://events.getdbt.com) near you
* Check out [the blog](https://blog.getdbt.com/) for the latest news on dbt's development and best practices
## Applying Model Tags
### Database / Schema level tags
Database and schema tags are applied via the `add_database_or_schema_tags` macro and are inherited by downstream objects. To add or modify tags, call the appropriate tag-set function within the macro.
```jinja
{{ set_database_tag_value('SOME_DATABASE_TAG_KEY','SOME_DATABASE_TAG_VALUE') }}
{{ set_schema_tag_value('SOME_SCHEMA_TAG_KEY','SOME_SCHEMA_TAG_VALUE') }}
```
### Model tags
To add or update a model's Snowflake tags, add or modify the `meta` model property under `config`. Only table-level tags are supported via dbt at this time.
```jinja
{{ config(
...
meta={
'database_tags':{
'table': {
'PURPOSE': 'SOME_PURPOSE'
}
}
},
...
) }}
```
By default, model tags are pushed to Snowflake on each load. You can disable this by setting the `UPDATE_SNOWFLAKE_TAGS` project variable to `False` during a run.
```sh
dbt run --vars '{"UPDATE_SNOWFLAKE_TAGS":False}' -s models/core/core__fact_blocks.sql
```
### Querying for existing tags on a model in Snowflake
```sql
select *
from table(livequery.information_schema.tag_references('livequery.core.fact_blocks', 'table'));
```

View File

@@ -1,8 +0,0 @@
{%- set name -%}
{{- udf_configs() -}}
{% endset %}
{% set udfs = fromyaml(name) %}
{%- for udf in udfs -%}
{{- create_or_drop_function_from_config(udf, drop_=True) -}}
{{- create_or_drop_function_from_config(udf, drop_=False) -}}
{% endfor %}

View File

@@ -1,12 +0,0 @@
Project,Endpoint,Type,Documentation
Snapshot,https://hub.snapshot.org/graphql,GraphQL,https://docs.snapshot.org/graphql-api
Defillama,https://api.llama.fi/,REST,https://defillama.com/docs/api
Defillama,https://yields.llama.fi/,REST,https://defillama.com/docs/api
Defillama,https://stablecoins.llama.fi/,REST,https://defillama.com/docs/api
Defillama,https://bridges.llama.fi/,REST,https://defillama.com/docs/api
Defillama,https://coins.llama.fi/,REST,https://defillama.com/docs/api
zkSync,https://api.zksync.io/api/v0.2/,REST,https://docs.zksync.io/apiv02-docs/
DeepNFT Value,https://api.deepnftvalue.com/v1,REST,https://deepnftvalue.readme.io/reference/getting-started-with-deepnftvalue-api
Zapper,https://api.zapper.fi/v2/,REST,https://api.zapper.fi/api/static/index.html#/Apps/AppsController_getApps
Helius,https://api.helius.xyz,REST,https://docs.helius.xyz/introduction/why-helius
Stargaze Name Service,https://rest.stargaze-apis.com,REST,https://github.com/public-awesome/names/blob/main/API.md

View File

@@ -1,12 +1,12 @@
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "livequery_models"
name: "fsc_utils"
version: "1.0.0"
config-version: 2
# This setting configures which "profile" dbt uses for this project.
profile: "livequery"
# profile: "fsc_utils"
# These configurations specify where dbt should look for different types of files.
# The `source-paths` config, for example, states that models in this project can be
@@ -27,13 +27,6 @@ clean-targets: # directories to be removed by `dbt clean`
tests:
+store_failures: true # all tests
on-run-start:
- "{{ create_sps() }}"
- "{{ create_udfs() }}"
on-run-end:
- "{{ apply_meta_as_tags(results) }}"
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models
@@ -44,10 +37,4 @@ on-run-end:
vars:
"dbt_date:time_zone": GMT
UPDATE_UDFS_AND_SPS: false
UPDATE_SNOWFLAKE_TAGS: true
STREAMLINE_INVOKE_STREAMS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
REST_API_ID_PROD: hn8uqhku77
REST_API_ID_DEV: hn8uqhku77
API_INTEGRATION: AWS_LIVE_QUERY{{ '_DEV' if target.name != 'prod' else '' }}
AWS_REGION: us-east-1

View File

@@ -1,8 +1,5 @@
{% macro create_sps() %}
{# {% macro create_sps() %}
{% if var("UPDATE_UDFS_AND_SPS") %}
{% if target.database == 'LIVEQUERY' %}
CREATE schema IF NOT EXISTS _internal;
{{ sp_create_prod_clone('_internal') }};
{% endif %}
{% endif %}
{% endmacro %}
{% endmacro %} #}

View File

@@ -1,18 +1,13 @@
{% macro create_udfs(drop_=False) %}
{% macro create_udfs(drop_=False,schema="utils") %}
{% if var("UPDATE_UDFS_AND_SPS") %}
{% set name %}
{{- udf_configs() -}}
{{- fsc_utils.udf_configs(schema) -}}
{% endset %}
{% set udfs = fromyaml(name) %}
{% set sql %}
CREATE schema if NOT EXISTS silver;
CREATE schema if NOT EXISTS beta;
CREATE schema if NOT EXISTS utils;
CREATE schema if NOT EXISTS _utils;
CREATE schema if NOT EXISTS _live;
CREATE schema if NOT EXISTS live;
CREATE schema if NOT EXISTS {{ schema }};
{%- for udf in udfs -%}
{{- create_or_drop_function_from_config(udf, drop_=drop_) -}}
{{- fsc_utils.create_or_drop_function_from_config(udf, drop_=drop_) -}}
{% endfor %}
{% endset %}
{% do run_query(sql) %}

View File

@@ -1,10 +0,0 @@
{% macro run_sp_create_prod_clone() %}
{% set clone_query %}
call livequery._internal.create_prod_clone(
'livequery',
'livequery_dev',
'livequery_dev_owner'
);
{% endset %}
{% do run_query(clone_query) %}
{% endmacro %}

View File

@@ -1,44 +0,0 @@
{% macro sp_create_prod_clone(target_schema) -%}
create or replace procedure {{ target_schema }}.create_prod_clone(source_db_name string, destination_db_name string, role_name string)
returns boolean
language javascript
execute as caller
as
$$
snowflake.execute({sqlText: `BEGIN TRANSACTION;`});
try {
snowflake.execute({sqlText: `CREATE OR REPLACE DATABASE ${DESTINATION_DB_NAME} CLONE ${SOURCE_DB_NAME}`});
snowflake.execute({sqlText: `DROP SCHEMA IF EXISTS ${DESTINATION_DB_NAME}._INTERNAL`}); /* this only needs to be in prod */
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL SCHEMAS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL VIEWS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL STAGES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL TABLES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE VIEWS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE STAGES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE TABLES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`})
var existing_tags = snowflake.execute({sqlText: `SHOW TAGS IN DATABASE ${DESTINATION_DB_NAME};`});
while (existing_tags.next()) {
var schema = existing_tags.getColumnValue(4);
var tag_name = existing_tags.getColumnValue(2)
snowflake.execute({sqlText: `GRANT OWNERSHIP ON TAG ${DESTINATION_DB_NAME}.${schema}.${tag_name} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
}
snowflake.execute({sqlText: `COMMIT;`});
} catch (err) {
snowflake.execute({sqlText: `ROLLBACK;`});
throw(err);
}
return true
$$
{%- endmacro %}

View File

@@ -1,11 +0,0 @@
{% macro create_aws_ethereum_api() %}
{% if target.name == "prod" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_ethereum_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::661245089684:role/snowflake-api-ethereum' api_allowed_prefixes = (
'https://e03pt6v501.execute-api.us-east-1.amazonaws.com/prod/',
'https://mryeusnrob.execute-api.us-east-1.amazonaws.com/dev/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}

View File

@@ -1,48 +1,10 @@
{% macro udf_configs() %}
{% macro udf_configs(schema) %}
{#
UTILITY SCHEMA
#}
- name: _utils.udf_introspect
signature:
- [echo, STRING]
func_type: SECURE EXTERNAL
return_type: TEXT
api_integration: '{{ var("API_INTEGRATION") }}'
sql: introspect
- name: _utils.udf_whoami
signature: []
func_type: SECURE
return_type: TEXT
options: NOT NULL STRICT IMMUTABLE MEMOIZABLE
sql: |
SELECT
COALESCE(SPLIT_PART(GETVARIABLE('QUERY_TAG_SESSION'), ',',2), CURRENT_USER())
- name: _utils.udf_register_secret
signature:
- [request_id, STRING]
- [user_id, STRING]
- [key, STRING]
return_type: TEXT
func_type: SECURE EXTERNAL
api_integration: '{{ var("API_INTEGRATION") }}'
options: NOT NULL STRICT
sql: secret/register
- name: utils.udf_register_secret
signature:
- [request_id, STRING]
- [key, STRING]
func_type: SECURE
return_type: TEXT
options: NOT NULL STRICT IMMUTABLE
sql: |
SELECT
_utils.UDF_REGISTER_SECRET(REQUEST_ID, _utils.UDF_WHOAMI(), KEY)
- name: utils.udf_hex_to_int
- name: {{ schema }}.udf_hex_to_int
signature:
- [hex, STRING]
return_type: TEXT
@@ -53,8 +15,8 @@
RUNTIME_VERSION = '3.8'
HANDLER = 'hex_to_int'
sql: |
{{ python_hex_to_int() | indent(4) }}
- name: utils.udf_hex_to_int
{{ fsc_utils.python_hex_to_int() | indent(4) }}
- name: {{ schema }}.udf_hex_to_int
signature:
- [encoding, STRING]
- [hex, STRING]
@@ -66,9 +28,9 @@
RUNTIME_VERSION = '3.8'
HANDLER = 'hex_to_int'
sql: |
{{ python_udf_hex_to_int_with_encoding() | indent(4) }}
{{ fsc_utils.python_udf_hex_to_int_with_encoding() | indent(4) }}
- name: utils.udf_hex_to_string
- name: {{ schema }}.udf_hex_to_string
signature:
- [hex, STRING]
return_type: TEXT
@@ -82,42 +44,57 @@
try_hex_decode_string(hex),
'[\x00-\x1F\x7F-\x9F\xAD]', '', 1))
{#
LIVE SCHEMA
#}
- name: _live.udf_api
- name: {{ schema }}.udf_json_rpc_call
signature:
- [method, STRING]
- [url, STRING]
- [headers, OBJECT]
- [DATA, OBJECT]
- [user_id, STRING]
- [SECRET, STRING]
return_type: VARIANT
func_type: SECURE EXTERNAL
api_integration: '{{ var("API_INTEGRATION") }}'
options: NOT NULL STRICT
sql: udf_api
- name: live.udf_api
signature:
- [method, STRING]
- [url, STRING]
- [headers, OBJECT]
- [data, OBJECT]
- [secret_name, STRING]
return_type: VARIANT
func_type: SECURE
options: NOT NULL STRICT VOLATILE
- [params, ARRAY]
return_type: OBJECT
options: |
NULL
LANGUAGE SQL
RETURNS NULL ON NULL INPUT
IMMUTABLE
sql: |
SELECT
_live.UDF_API(
method,
url,
headers,
data,
_utils.UDF_WHOAMI(),
secret_name
)
{{ fsc_utils.sql_udf_json_rpc_call() }}
- name: {{ schema }}.udf_json_rpc_call
signature:
- [method, STRING]
- [params, OBJECT]
return_type: OBJECT
options: |
NULL
LANGUAGE SQL
RETURNS NULL ON NULL INPUT
IMMUTABLE
sql: |
{{ fsc_utils.sql_udf_json_rpc_call() }}
- name: {{ schema }}.udf_json_rpc_call
signature:
- [method, STRING]
- [params, OBJECT]
- [id, STRING]
return_type: OBJECT
options: |
NULL
LANGUAGE SQL
RETURNS NULL ON NULL INPUT
IMMUTABLE
sql: |
{{ fsc_utils.sql_udf_json_rpc_call(False) }}
- name: {{ schema }}.udf_json_rpc_call
signature:
- [method, STRING]
- [params, ARRAY]
- [id, STRING]
return_type: OBJECT
options: |
NULL
LANGUAGE SQL
RETURNS NULL ON NULL INPUT
IMMUTABLE
sql: |
{{ fsc_utils.sql_udf_json_rpc_call(False) }}
{% endmacro %}

View File

@@ -0,0 +1,15 @@
{% macro sql_udf_json_rpc_call(use_default_id=True) %}
SELECT IFF(method IS NULL or params IS NULL,
NULL,
{
'jsonrpc': '2.0',
'method': method,
'params': params
{% if use_default_id %}
, 'id': hash(method, params)::string
{% else %}
, 'id': id
{% endif %}
}
)
{% endmacro %}
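As a quick illustration of what the generated UDF returns, here is a usage sketch, assuming it is deployed as `utils.udf_json_rpc_call` (the default schema used by `create_udfs`):
```
SELECT utils.udf_json_rpc_call('eth_getBlockByNumber', ['0x10d4f', false]) AS rpc_call;
-- returns an OBJECT such as:
-- { "id": "<hash of method and params>", "jsonrpc": "2.0",
--   "method": "eth_getBlockByNumber", "params": ["0x10d4f", false] }
```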

View File

@@ -1,22 +0,0 @@
{% macro create_udf_introspect(
drop_ = False
) %}
{% set name_ = 'silver.udf_introspect' %}
{% set signature = [('json', 'variant')] %}
{% set return_type = 'text' %}
{% set sql_ = construct_api_route("introspect") %}
{% if not drop_ %}
{{ create_sql_function(
name_ = name_,
signature = signature,
return_type = return_type,
sql_ = sql_,
api_integration = var("API_INTEGRATION")
) }}
{% else %}
{{ drop_function(
name_,
signature = signature,
) }}
{% endif %}
{% endmacro %}

View File

@@ -2,7 +2,7 @@
func_name,
signature
) %}
DROP FUNCTION IF EXISTS {{ func_name }}({{ compile_signature(signature, drop_ = True) }});
DROP FUNCTION IF EXISTS {{ func_name }}({{ fsc_utils.compile_signature(signature, drop_ = True) }});
{% endmacro %}
{%- macro construct_api_route(route) -%}
@@ -35,7 +35,7 @@
func_type = none
) %}
CREATE OR REPLACE {{ func_type }} FUNCTION {{ name_ }}(
{{- compile_signature(signature) }}
{{- fsc_utils.compile_signature(signature) }}
)
COPY GRANTS
RETURNS {{ return_type }}
@@ -44,7 +44,7 @@
{% endif %}
{%- if api_integration -%}
api_integration = {{ api_integration }}
AS {{ construct_api_route(sql_) ~ ";" }}
AS {{ fsc_utils.construct_api_route(sql_) ~ ";" }}
{% else -%}
AS
$$
@@ -66,7 +66,7 @@
{% set func_type = config ["func_type"] %}
{% if not drop_ -%}
{{ create_sql_function(
{{ fsc_utils.create_sql_function(
name_ = name_,
signature = signature,
return_type = return_type,
@@ -76,7 +76,7 @@
func_type = func_type
) }}
{%- else -%}
{{ drop_function(
{{ fsc_utils.drop_function(
name_,
signature = signature,
) }}

View File

@@ -1,3 +0,0 @@
{% macro add_database_or_schema_tags() %}
{{ set_database_tag_value('BLOCKCHAIN_NAME','LIVEQUERY') }}
{% endmacro %}

View File

@@ -1,127 +0,0 @@
{% macro apply_meta_as_tags(results) %}
{% if var("UPDATE_SNOWFLAKE_TAGS") %}
{{ log('apply_meta_as_tags', info=False) }}
{{ log(results, info=False) }}
{% if execute %}
{%- set tags_by_schema = {} -%}
{% for res in results -%}
{% if res.node.meta.database_tags %}
{%- set model_database = res.node.database -%}
{%- set model_schema = res.node.schema -%}
{%- set model_schema_full = model_database+'.'+model_schema -%}
{%- set model_alias = res.node.alias -%}
{% if model_schema_full not in tags_by_schema.keys() %}
{{ log('need to fetch tags for schema '+model_schema_full, info=False) }}
{%- call statement('main', fetch_result=True) -%}
show tags in {{model_database}}.{{model_schema}}
{%- endcall -%}
{%- set _ = tags_by_schema.update({model_schema_full: load_result('main')['table'].columns.get('name').values()|list}) -%}
{{ log('Added tags to cache', info=False) }}
{% else %}
{{ log('already have tag info for schema', info=False) }}
{% endif %}
{%- set current_tags_in_schema = tags_by_schema[model_schema_full] -%}
{{ log('current_tags_in_schema:', info=False) }}
{{ log(current_tags_in_schema, info=False) }}
{{ log("========== Processing tags for "+model_schema_full+"."+model_alias+" ==========", info=False) }}
{% set line -%}
node: {{ res.node.unique_id }}; status: {{ res.status }} (message: {{ res.message }})
node full: {{ res.node}}
meta: {{ res.node.meta}}
materialized: {{ res.node.config.materialized }}
{%- endset %}
{{ log(line, info=False) }}
{%- call statement('main', fetch_result=True) -%}
select LEVEL,UPPER(TAG_NAME) as TAG_NAME,TAG_VALUE from table(information_schema.tag_references_all_columns('{{model_schema}}.{{model_alias}}', 'table'))
{%- endcall -%}
{%- set existing_tags_for_table = load_result('main')['data'] -%}
{{ log('Existing tags for table:', info=False) }}
{{ log(existing_tags_for_table, info=False) }}
{{ log('--', info=False) }}
{% for table_tag in res.node.meta.database_tags.table %}
{{ create_tag_if_missing(current_tags_in_schema,table_tag|upper) }}
{% set desired_tag_value = res.node.meta.database_tags.table[table_tag] %}
{{set_table_tag_value_if_different(model_schema,model_alias,table_tag,desired_tag_value,existing_tags_for_table)}}
{% endfor %}
{{ log("========== Finished processing tags for "+model_alias+" ==========", info=False) }}
{% endif %}
{% endfor %}
{% endif %}
{% endif %}
{% endmacro %}
{% macro create_tag_if_missing(all_tag_names,table_tag) %}
{% if table_tag not in all_tag_names %}
{{ log('Creating missing tag '+table_tag, info=False) }}
{%- call statement('main', fetch_result=True) -%}
create tag if not exists silver.{{table_tag}}
{%- endcall -%}
{{ log(load_result('main').data, info=False) }}
{% else %}
{{ log('Tag already exists: '+table_tag, info=False) }}
{% endif %}
{% endmacro %}
{% macro set_table_tag_value_if_different(model_schema,table_name,tag_name,desired_tag_value,existing_tags) %}
{{ log('Ensuring tag '+tag_name+' has value '+desired_tag_value+' at table level', info=False) }}
{%- set existing_tag_for_table = existing_tags|selectattr('0','equalto','TABLE')|selectattr('1','equalto',tag_name|upper)|list -%}
{{ log('Filtered tags for table:', info=False) }}
{{ log(existing_tag_for_table[0], info=False) }}
{% if existing_tag_for_table|length > 0 and existing_tag_for_table[0][2]==desired_tag_value %}
{{ log('Correct tag value already exists', info=False) }}
{% else %}
{{ log('Setting tag value for '+tag_name+' to value '+desired_tag_value, info=False) }}
{%- call statement('main', fetch_result=True) -%}
alter table {{model_schema}}.{{table_name}} set tag {{tag_name}} = '{{desired_tag_value}}'
{%- endcall -%}
{{ log(load_result('main').data, info=False) }}
{% endif %}
{% endmacro %}
{% macro set_column_tag_value_if_different(table_name,column_name,tag_name,desired_tag_value,existing_tags) %}
{{ log('Ensuring tag '+tag_name+' has value '+desired_tag_value+' at column level', info=False) }}
{%- set existing_tag_for_column = existing_tags|selectattr('0','equalto','COLUMN')|selectattr('1','equalto',tag_name|upper)|list -%}
{{ log('Filtered tags for column:', info=False) }}
{{ log(existing_tag_for_column[0], info=False) }}
{% if existing_tag_for_column|length > 0 and existing_tag_for_column[0][2]==desired_tag_value %}
{{ log('Correct tag value already exists', info=False) }}
{% else %}
{{ log('Setting tag value for '+tag_name+' to value '+desired_tag_value, info=False) }}
{%- call statement('main', fetch_result=True) -%}
alter table {{table_name}} modify column {{column_name}} set tag {{tag_name}} = '{{desired_tag_value}}'
{%- endcall -%}
{{ log(load_result('main').data, info=False) }}
{% endif %}
{% endmacro %}
{% macro set_database_tag_value(tag_name,tag_value) %}
{% set query %}
create tag if not exists silver.{{tag_name}}
{% endset %}
{% do run_query(query) %}
{% set query %}
alter database {{target.database}} set tag {{target.database}}.silver.{{tag_name}} = '{{tag_value}}'
{% endset %}
{% do run_query(query) %}
{% endmacro %}
{% macro set_schema_tag_value(target_schema,tag_name,tag_value) %}
{% set query %}
create tag if not exists silver.{{tag_name}}
{% endset %}
{% do run_query(query) %}
{% set query %}
alter schema {{target.database}}.{{target_schema}} set tag {{target.database}}.silver.{{tag_name}} = '{{tag_value}}'
{% endset %}
{% do run_query(query) %}
{% endmacro %}

View File

@@ -1,159 +0,0 @@
{% docs __overview__ %}
# Welcome to the Flipside Crypto LiveQuery Models Documentation!
## **What does this documentation cover?**
The documentation included here details the design of the LiveQuery functions available via [Flipside Crypto](https://flipsidecrypto.xyz/). For more information on how these functions are built, please see [the GitHub repository](https://github.com/FlipsideCrypto/livequery-models).
### **Overview of Available Functions**
#### **UTILS Functions**
- `utils.hex_to_int`: Use this UDF to transform any hex string to integer
```
ex: Curve Swaps
SELECT
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
utils.hex_to_int(segmented_data [1] :: STRING) :: INTEGER AS tokens_sold
FROM
optimism.core.fact_event_logs
WHERE
topics [0] :: STRING IN (
'0x8b3e96f2b889fa771c53c981b40daf005f63f637f1869f707052d15a3dd97140',
'0xd013ca23e77a65003c2c659c5442c00c805371b7fc1ebd4c206c41d1536bd90b'
)
```
- `utils.hex_encode_function` (coming soon)(Function VARCHAR): Use this UDF to hex encode any string
```
ex: Decimals Function Signature
SELECT
'decimals' AS function_name,
utils.hex_encode_function('decimals()') :: STRING AS text_sig,
LEFT(text_sig,10) AS function_sig,
'0x313ce567' AS expected_sig
```
- `utils.evm_decode_logs` (coming soon)
- `utils.udf_register_secret`
#### **LIVE Functions & Examples**
- `live.udf_api`(Method STRING, URL STRING, Headers OBJECT, Data OBJECT): Use this UDF to make a GET or POST request on any API
```
ex: Defillama GET request -> working with the output (JSON flatten)
WITH chain_base AS (
SELECT
live.udf_api(
'GET','https://api.llama.fi/chains',{},{}
) AS read
)
SELECT
VALUE:chainId::STRING AS chain_id,
VALUE:name::STRING AS chain,
VALUE:tokenSymbol::STRING AS token_symbol
FROM chain_base,
LATERAL FLATTEN (input=> read:data)
ex: Solana Token Metadata
SELECT
live.udf_api(
'GET',
'https://public-api.solscan.io/token/meta?tokenAddress=SPraYi59a21jEhqvPBbWuwmjA4vdTaSLbiRTefcHJSR',
{ },
{ }
);
ex: Running with multiple token addresses at the same time
WITH solana_addresses AS (
SELECT
'SPraYi59a21jEhqvPBbWuwmjA4vdTaSLbiRTefcHJSR' AS address
UNION
SELECT
'4KbzSz2VF1LCvEaw8viq1335VgWzNjMd8rwQMsCkKHip'
)
SELECT
live.udf_api(
'GET',
concat(
'https://public-api.solscan.io/token/meta?tokenAddress=',
address
),
{ },
{ }
)
FROM
solana_addresses;
ex: Hit Quicknode (see instructions below for how to register an API Key with Flipside securely)
SELECT
live.udf_api(
'POST',
concat(
'http://sample-endpoint-name.network.quiknode.pro/',
'{my_key}'
),
{},
{ 'method' :'eth_blockNumber',
'params' :[],
'id' :1,
'jsonrpc' :'2.0' },
'quicknode'
);
```
##### **Registering and Using LiveQuery Credentials to Query Quicknode**
With LiveQuery you can safely store encrypted credentials, such as an API key, with Flipside, and query blockchain nodes directly via our SQL interface. Here's how:
1. Sign up for a free [Quicknode API Account](https://www.quicknode.com/core-api)
2. Navigate to ***Endpoints*** on the left-hand side, then click the ***Get Started*** tab and ***Copy*** the HTTP Provider Endpoint. Do not adjust the Setup or Security parameters.
3. Visit [Ephit](https://science.flipsidecrypto.xyz/ephit) to obtain an Ephemeral query that will securely link your API Endpoint to Flipside's backend. This will allow you to refer to the URL securely in our application without referencing it or exposing keys directly.
4. Fill out the form and click ***Submit this Credential***
5. Paste the provided query into [Flipside](https://flipside.new) and query your node directly in the app with your submitted Credential (`{my_key}`).
##### **API Endpoints available for use with LiveQuery:**
Please visit the Flipside discord or open a ticket for questions, concerns or more information.
- Snapshot: [https://hub.snapshot.org/graphql](https://docs.snapshot.org/graphql-api)
- Defillama: [https://api.llama.fi/](https://defillama.com/docs/api)
- Defillama: [https://yields.llama.fi/](https://defillama.com/docs/api)
- Defillama: [https://stablecoins.llama.fi/](https://defillama.com/docs/api)
- Defillama: [https://bridges.llama.fi/](https://defillama.com/docs/api)
- Defillama: [https://coins.llama.fi/](https://defillama.com/docs/api)
- zkSync: [https://api.zksync.io/api/v0.2/](https://docs.zksync.io/apiv02-docs/)
- DeepNFT Value: [https://api.deepnftvalue.com/v1](https://deepnftvalue.readme.io/reference/getting-started-with-deepnftvalue-api)
- Zapper: [https://api.zapper.fi/v2/](https://api.zapper.fi/api/static/index.html#/Apps/AppsController_getApps)
- Helius: [https://api.helius.xyz](https://docs.helius.xyz/introduction/why-helius)
- Stargaze Name Service: [https://rest.stargaze-apis.com](https://github.com/public-awesome/names/blob/main/API.md)
## **Using dbt docs**
### Navigation
You can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models in the project.
### Database Tab
This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the database.
### Graph Exploration
You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.
On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the Expand button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build, or are built from, the model you're exploring.
Once expanded, you'll be able to use the `--models` and `--exclude` model selection syntax to filter the models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).
Note that you can also right-click on models to interactively filter and explore the graph.
### **More information**
- [Flipside](https://flipsidecrypto.xyz/)
- [Velocity](https://app.flipsidecrypto.com/velocity?nav=Discover)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/external-models)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)
{% enddocs %}

View File

@@ -1,8 +0,0 @@
version: 2
sources:
- name: crosschain
database: "{{ 'crosschain' if target.database == 'LIVEQUERY' else 'crosschain_dev' }}"
schema: core
tables:
- name: dim_date_hours

View File

@@ -1,29 +0,0 @@
livequery:
target: prod
outputs:
dev:
type: snowflake
account: "{{ env_var('ACCOUNT') }}"
role: "{{ env_var('ROLE') }}"
user: "{{ env_var('USER') }}"
password: "{{ env_var('PASSWORD') }}"
region: "{{ env_var('REGION') }}"
database: "{{ env_var('DATABASE') }}"
warehouse: "{{ env_var('WAREHOUSE') }}"
schema: SILVER
threads: 4
client_session_keep_alive: False
query_tag: livequery_curator
prod:
type: snowflake
account: "{{ env_var('ACCOUNT') }}"
role: "{{ env_var('ROLE') }}"
user: "{{ env_var('USER') }}"
password: "{{ env_var('PASSWORD') }}"
region: "{{ env_var('REGION') }}"
database: "{{ env_var('DATABASE') }}"
warehouse: "{{ env_var('WAREHOUSE') }}"
schema: SILVER
threads: 4
client_session_keep_alive: False
query_tag: livequery_curator