Mirror of https://github.com/FlipsideCrypto/optimism-models.git (synced 2026-02-06 16:01:54 +00:00)
AN-3280 AWS Lambda user (#104)
* added streamline api_integration macros
* added api integrations, udft and streamline udfs
* added setup documentation

Co-authored-by: shah <info@shahnewazkhan.ca>
This commit is contained in: parent f7f1b25817, commit 239ce2b195
@ -44,4 +44,6 @@ on-run-end:
vars:
  "dbt_date:time_zone": GMT
  UPDATE_UDFS_AND_SPS: False
  UPDATE_SNOWFLAKE_TAGS: True
  STREAMLINE_INVOKE_STREAMS: False
  STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
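The two new streamline flags are read with dbt's `var()` at run time. A hypothetical sketch (not the repo's actual model code) of how a model can gate the external-function call on `STREAMLINE_INVOKE_STREAMS`:

```sql
-- hypothetical example: only call out to the streamline lambda when the var is enabled
{% if var("STREAMLINE_INVOKE_STREAMS", false) %}
SELECT streamline.udf_get_chainhead() AS chainhead
{% else %}
SELECT NULL AS chainhead
{% endif %}
```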
macros/streamline/api_integrations.sql (new file, 25 lines)
@ -0,0 +1,25 @@
{% macro create_aws_optimism_api() %}
    {{ log("Creating integration for target:" ~ target) }}
    {% if target.name == "prod" %}
        {% set sql %}
            CREATE api integration IF NOT EXISTS aws_optimism_api_prod
                api_provider = aws_api_gateway
                api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-optimism'
                api_allowed_prefixes = ('<PROD_URL_PLACEHOLDER>')
                enabled = TRUE;
        {% endset %}
        {% do run_query(sql) %}
    {% elif target.name == "dev" %}
        {% set sql %}
            CREATE api integration IF NOT EXISTS aws_optimism_api_dev
                api_provider = aws_api_gateway
                api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-optimism'
                api_allowed_prefixes = ('https://4sovbxzgsf.execute-api.us-east-1.amazonaws.com/dev/')
                enabled = TRUE;
        {% endset %}
        {% do run_query(sql) %}
    {% elif target.name == "sbx" %}
        {% set sql %}
            CREATE api integration IF NOT EXISTS aws_optimism_api_sbx_shah
                api_provider = aws_api_gateway
                api_aws_role_arn = 'arn:aws:iam::579011195466:role/snowflake-api-optimism'
                api_allowed_prefixes = ('https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/')
                enabled = TRUE;
        {% endset %}
        {% do run_query(sql) %}
    {% endif %}
{% endmacro %}
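After the macro has been run for a given target (for example via `dbt run-operation`, as shown in the setup notes below), the resulting integration can be inspected from a worksheet. A minimal check, assuming the sandbox name created above:

```sql
-- list the integrations created by the macro and inspect the sandbox one
SHOW API INTEGRATIONS LIKE 'aws_optimism_api%';
DESCRIBE INTEGRATION aws_optimism_api_sbx_shah;
```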
macros/streamline/get_base_table_udft.sql (new file, 23 lines)
@ -0,0 +1,23 @@
{% macro create_udtf_get_base_table(schema) %}
    create or replace function {{ schema }}.udtf_get_base_table(max_height integer)
    returns table (height number)
    as
    $$
        with base as (
            select
                row_number() over (
                    order by
                        seq4()
                ) as id
            from
                table(generator(rowcount => 100000000))
        )
        select
            id as height
        from
            base
        where
            id <= max_height
    $$;
{% endmacro %}
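A quick usage example for the table function, assuming the macro was invoked with `streamline` as the schema argument:

```sql
-- enumerate candidate block heights from 1 up to the requested max_height
SELECT height
FROM TABLE(streamline.udtf_get_base_table(1000000));
```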
macros/streamline/streamline_udfs.sql (new file, 30 lines)
@ -0,0 +1,30 @@
{% macro create_udf_get_chainhead() %}
    CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead()
    returns variant
    api_integration = aws_avalanche_api
    AS {% if target.name == "prod" %}
        '<REPLACE_WITH_PROD_URI>/get_chainhead'
    {% else %}
        'https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/get_chainhead'
    {%- endif %};
{% endmacro %}

{% macro create_udf_bulk_json_rpc() %}
    CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(
        json variant
    )
    returns text
    api_integration = aws_terra_api
    AS {% if target.name == "prod" %}
        '<REPLACE_WITH_PROD_URI>/udf_bulk_json_rpc'
    {% else %}
        'https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/udf_bulk_json_rpc'
    {%- endif %};
{% endmacro %}

{% macro create_udf_bulk_decode_logs() %}
    CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_decode_logs(
        json OBJECT
    )
    returns ARRAY
    api_integration = aws_arbitrum_api
    AS {% if target.name == "prod" %}
        '<REPLACE_WITH_PROD_URI>/prod/bulk_decode_logs'
    {% else %}
        'https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/bulk_decode_logs'
    {%- endif %};
{% endmacro %}
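Once these external functions exist (and the referenced api integrations and endpoints are in place), they can be called like ordinary SQL functions; for example:

```sql
-- ask the streamline lambda for the current chainhead (requires the API integration and endpoint)
SELECT streamline.udf_get_chainhead();
```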
models/silver/streamline/streamline-sbx-setup.md (new file, 99 lines)
@ -0,0 +1,99 @@
## Sandbox integration setup

In order to perform a `sandbox` `streamline` integration you need to register an `api integration` with your `sbx api gateway` endpoint.
### DBT Global config

- The first step is to configure your global `dbt` profile:

```zsh
# create the dbt global config file
touch ~/.dbt/profiles.yml
```

- Then add the following to `~/.dbt/profiles.yml`:

```yaml
optimism:
  target: sbx
  outputs:
    sbx:
      type: snowflake
      account: vna27887.us-east-1
      role: DBT_CLOUD_OPTIMISM
      user: <REPLACE_WITH_YOUR_USER>@flipsidecrypto.com
      authenticator: externalbrowser
      region: us-east-1
      database: OPTIMISM_DEV
      warehouse: DBT
      schema: STREAMLINE
      threads: 12
      client_session_keep_alive: False
      query_tag: dbt_<REPLACE_WITH_YOUR_USER>_dev
```
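Before moving on, it can help to confirm that the objects named in the profile are reachable with your Snowflake user. A minimal worksheet check, using the role, warehouse and database from the profile above:

```sql
-- sanity-check the role, warehouse and database referenced by the dbt profile
USE ROLE DBT_CLOUD_OPTIMISM;
USE WAREHOUSE DBT;
USE DATABASE OPTIMISM_DEV;
```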
### Create user & role for streamline lambdas to use and apply the appropriate roles
|
||||
|
||||
```sql
|
||||
-- Create OPTIMISM_DEV.streamline schema
|
||||
CREATE SCHEMA OPTIMISM_DEV.STREAMLINE
|
||||
|
||||
CREATE ROLE AWS_LAMBDA_OPTIMISM_API_SBX;
|
||||
|
||||
CREATE USER AWS_LAMBDA_OPTIMISM_API_SBX PASSWORD='abc123' DEFAULT_ROLE = AWS_LAMBDA_OPTIMISM_API_SBX MUST_CHANGE_PASSWORD = TRUE;
|
||||
|
||||
GRANT SELECT ON ALL VIEWS IN SCHEMA OPTIMISM_DEV.STREAMLINE TO ROLE AWS_LAMBDA_OPTIMISM_API_SBX;
|
||||
|
||||
GRANT ROLE AWS_LAMBDA_OPTIMISM_API TO USER AWS_LAMBDA_OPTIMISM_API;
|
||||
|
||||
-- Note that the password must meet Snowflake's password requirements, which include a minimum length of 8 characters, at least one uppercase letter, at least one lowercase letter, and at least one number or special character.
|
||||
|
||||
ALTER USER AWS_LAMBDA_OPTIMISM_API_SBX SET PASSWORD = 'new_password';
|
||||
```
|
||||
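To confirm the grants and role membership landed as intended, a quick check from a worksheet (role and user names as created above):

```sql
-- verify the sandbox role's privileges and the lambda user's role membership
SHOW GRANTS TO ROLE AWS_LAMBDA_OPTIMISM_API_SBX;
SHOW GRANTS TO USER AWS_LAMBDA_OPTIMISM_API_SBX;
```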
### Register Snowflake integration and UDFs

- Register the `api integration` either manually in a `snowsight worksheet` or via `dbt`

```sql
-- Manually run on snowflake
CREATE api integration IF NOT EXISTS aws_optimism_api_sbx_shah
    api_provider = aws_api_gateway
    api_aws_role_arn = 'arn:aws:iam::579011195466:role/snowflake-api-optimism'
    api_allowed_prefixes = ('https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/')
    enabled = TRUE;
```

```zsh
# Use dbt to run the create_aws_optimism_api macro
dbt run-operation create_aws_optimism_api --target dev
```

- Add the UDF to the `macros/streamline/streamline_udfs.sql` macro file
- Register the UDF:

```sql
CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(json variant) returns text api_integration = aws_optimism_api_sbx_shah AS 'https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/udf_bulk_json_rpc';

CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_decode_logs(json OBJECT) returns ARRAY api_integration = aws_optimism_api_sbx_shah AS 'https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/bulk_decode_logs';
```
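Once the external functions are registered, they can be smoke-tested straight from SQL. The payload below is only a placeholder to exercise the integration end to end; the lambda defines the real request shape:

```sql
-- placeholder payload; the actual structure expected by the lambda will differ
SELECT streamline.udf_bulk_json_rpc(PARSE_JSON('{"jsonrpc": "2.0"}'));
```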
- Add the  model
- Add the  model
- Add the  macro

- Grant privileges to `AWS_LAMBDA_OPTIMISM_API_SBX`

```sql
GRANT SELECT ON VIEW streamline.pc_getBlock_realtime TO ROLE AWS_LAMBDA_OPTIMISM_API_SBX;

GRANT USAGE ON DATABASE OPTIMISM_DEV TO ROLE AWS_LAMBDA_OPTIMISM_API_SBX;
```
## Run decode models

```zsh
# SBX
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True, "STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES": True}' -m 1+models/silver/streamline/decoder/streamline__decode_logs_realtime.sql --profile optimism --target sbx --profiles-dir ~/.dbt

# DEV
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True, "STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES": True}' -m 1+models/silver/streamline/decoder/streamline__decode_logs_realtime.sql --profile optimism --target dev --profiles-dir ~/.dbt
```