Merge branch 'main' into update-streamline-history-typo

This commit is contained in:
xiuy001 2023-05-09 15:19:49 -04:00
commit 9f5bb8c5e4
16 changed files with 195 additions and 85 deletions

View File

@ -41,4 +41,5 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m models/silver/streamline/decoder --exclude models/silver/streamline/decoder/history
dbt run -m models/silver/silver__decoded_logs.sql

View File

@ -3,8 +3,9 @@ run-name: dbt_run_streamline_decoder_history1
on:
workflow_dispatch:
branches:
- "main"
schedule:
# Runs "at 2:00 UTC PM" (see https://crontab.guru)
- cron: '0 14 * * *'
env:
DBT_PROFILES_DIR: ./

View File

@ -3,8 +3,9 @@ run-name: dbt_run_streamline_decoder_history2
on:
workflow_dispatch:
branches:
- "main"
schedule:
# Runs "at 4:00 UTC PM" (see https://crontab.guru)
- cron: '0 16 * * *'
env:
DBT_PROFILES_DIR: ./

View File

@ -3,8 +3,9 @@ run-name: dbt_run_streamline_decoder_history3
on:
workflow_dispatch:
branches:
- "main"
schedule:
# Runs "at 6:00 UTC PM" (see https://crontab.guru)
- cron: '0 18 * * *'
env:
DBT_PROFILES_DIR: ./

View File

@ -3,8 +3,9 @@ run-name: dbt_run_streamline_decoder_history4
on:
workflow_dispatch:
branches:
- "main"
schedule:
# Runs "at 8:00 UTC PM" (see https://crontab.guru)
- cron: '0 20 * * *'
env:
DBT_PROFILES_DIR: ./

View File

@ -3,8 +3,9 @@ run-name: dbt_run_streamline_decoder_history5
on:
workflow_dispatch:
branches:
- "main"
schedule:
# Runs "at 10:00 UTC PM" (see https://crontab.guru)
- cron: '0 22 * * *'
env:
DBT_PROFILES_DIR: ./

View File

@ -3,8 +3,9 @@ run-name: dbt_run_streamline_decoder_history6
on:
workflow_dispatch:
branches:
- "main"
schedule:
# Runs "at 12:00 UTC PM" (see https://crontab.guru)
- cron: '0 24 * * *'
env:
DBT_PROFILES_DIR: ./

View File

@ -1,44 +0,0 @@
name: dbt_run_temp_decoding_backfill
run-name: dbt_run_temp_decoding_backfill
on:
workflow_dispatch:
schedule:
# Runs "every 2 hours" (see https://crontab.guru)
- cron: '20 */2 * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod_2xl
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
with:
python-version: "3.7.x"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m models/silver/silver__decoded_logs.sql

View File

@ -0,0 +1,13 @@
{% docs deprecation %}
Deprecating soon: This is a notice that we are removing only the columns listed below. Please migrate queries that use these columns to `fact_decoded_event_logs` or `ez_decoded_event_logs`, or manually parse the raw topics and data. The following columns will be deprecated on 6/6/23:
`fact_event_logs` columns:
- `event_name`
- `event_inputs`
- `contract_name`
`fact_transactions` columns:
- `tx_json`
{% enddocs %}
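For readers migrating off the deprecated columns, here is a minimal sketch of the kind of rewrite this notice asks for, assuming the new view is exposed alongside `fact_event_logs` in the same schema; the qualifier and the block filter below are illustrative assumptions, not part of this repo:

```sql
-- Before: relies on columns deprecated on 6/6/23.
-- select tx_hash, event_name, event_inputs, contract_name
-- from fact_event_logs
-- where block_number > 28000000;        -- hypothetical filter

-- After: the same information sourced from ez_decoded_event_logs,
-- where decoded_log is the flattened key/value object replacing event_inputs.
select
    tx_hash,
    event_name,
    decoded_log as event_inputs_replacement,
    contract_name
from ez_decoded_event_logs               -- adjust database/schema qualifier to your deployment
where block_number > 28000000;           -- hypothetical filter
```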

View File

@ -0,0 +1,27 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true }
) }}
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
C.name AS contract_name,
event_name,
decoded_flat AS decoded_log,
decoded_data AS full_decoded_log,
origin_function_signature,
origin_from_address,
origin_to_address,
topics,
DATA,
event_removed,
tx_status
FROM
{{ ref('silver__decoded_logs') }}
LEFT JOIN {{ ref('core__dim_contracts') }} C
ON contract_address = C.address

View File

@ -0,0 +1,59 @@
version: 2
models:
- name: core__ez_decoded_event_logs
description: >
'For information on how to submit a contract for decoding, as well as how ABIs are sourced, please visit [here](https://science.flipsidecrypto.xyz/abi-requestor/).
This model contains decoded event logs for contracts that we have an ABI for. Please note, this table does not include all event logs, only those that we have an ABI for.
The `decoded_log` column is the easiest place to query decoded data. It is a JSON object, where the keys are the names of the event parameters, and the values are the values of the event parameters.
You can select from this column using the following sample format: `decoded_log:from::string`, or more generally, `decoded_log:<event_param>::datatype`. See below for a full sample query.
The `full_decoded_log` column contains the same information, as well as additional fields such as the data type of each decoded value. You may need to laterally flatten this column to query the data.
Sample query for USDC Transfer events:
```sql
select
tx_hash,
block_number,
contract_address,
decoded_log:from::string as from_address,
decoded_log:to::string as to_address,
decoded_log:value::integer as value
from ethereum.core.fact_decoded_event_logs
where contract_address = lower('0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48')
and block_number between 16400000 and 16405000
and event_name = 'Transfer'
limit 50
```'
columns:
- name: BLOCK_NUMBER
description: '{{ doc("bsc_block_number") }}'
- name: BLOCK_TIMESTAMP
description: '{{ doc("bsc_block_timestamp") }}'
- name: TX_HASH
description: '{{ doc("bsc_logs_tx_hash") }}'
- name: EVENT_INDEX
description: '{{ doc("bsc_event_index") }}'
- name: CONTRACT_ADDRESS
description: '{{ doc("bsc_logs_contract_address") }}'
- name: CONTRACT_NAME
description: 'The name of the contract, if the contract has a name() function.'
- name: EVENT_NAME
description: 'The name of the event, as defined in the contract ABI.'
- name: DECODED_LOG
description: 'The flattened decoded log, where the keys are the names of the event parameters, and the values are the values of the event parameters.'
- name: FULL_DECODED_LOG
description: 'The full decoded log, including the event name, the event parameters, and the data type of the event parameters.'
- name: ORIGIN_FUNCTION_SIGNATURE
description: '{{ doc("bsc_tx_origin_sig") }}'
- name: ORIGIN_FROM_ADDRESS
description: '{{ doc("bsc_origin_from") }}'
- name: ORIGIN_TO_ADDRESS
description: '{{ doc("bsc_origin_to") }}'
- name: TOPICS
description: '{{ doc("bsc_topics") }}'
- name: DATA
description: '{{ doc("bsc_logs_data") }}'
- name: EVENT_REMOVED
description: '{{ doc("bsc_event_removed") }}'
- name: TX_STATUS
description: '{{ doc("bsc_tx_status") }}'

View File

@ -0,0 +1,17 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true }
) }}
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
event_name,
decoded_flat AS decoded_log,
decoded_data AS full_decoded_log
FROM
{{ ref('silver__decoded_logs') }}

View File

@ -0,0 +1,44 @@
version: 2
models:
- name: core__fact_decoded_event_logs
description: >
'For information on how to submit a contract for decoding, as well as how ABIs are sourced, please visit [here](https://science.flipsidecrypto.xyz/abi-requestor/).
This model contains decoded event logs for contracts that we have an ABI for. Please note, this table does not include all event logs, only those that we have an ABI for.
This table will perform better than the `core__ez_decoded_event_logs` table, but does not include as many columns.
The `decoded_log` column is the easiest place to query decoded data. It is a JSON object, where the keys are the names of the event parameters, and the values are the values of the event parameters.
You can select from this column using the following sample format: `decoded_log:from::string`, or more generally, `decoded_log:<event_param>::datatype`. See below for a full sample query.
The `full_decoded_log` column contains the same information, as well as additional fields such as the data type of each decoded value. You may need to laterally flatten this column to query the data.
Sample query for USDC Transfer events:
```sql
select
tx_hash,
block_number,
contract_address,
decoded_log:from::string as from_address,
decoded_log:to::string as to_address,
decoded_log:value::integer as value
from ethereum.core.fact_decoded_event_logs
where contract_address = lower('0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48')
and block_number between 16400000 and 16405000
and event_name = 'Transfer'
limit 50
```'
columns:
- name: BLOCK_NUMBER
description: '{{ doc("bsc_block_number") }}'
- name: BLOCK_TIMESTAMP
description: '{{ doc("bsc_block_timestamp") }}'
- name: TX_HASH
description: '{{ doc("bsc_logs_tx_hash") }}'
- name: EVENT_INDEX
description: '{{ doc("bsc_event_index") }}'
- name: CONTRACT_ADDRESS
description: '{{ doc("bsc_logs_contract_address") }}'
- name: EVENT_NAME
description: 'The name of the event, as defined in the contract ABI.'
- name: DECODED_LOG
description: 'The flattened decoded log, where the keys are the names of the event parameters, and the values are the values of the event parameters.'
- name: FULL_DECODED_LOG
description: 'The full decoded log, including the event name, the event parameters, and the data type of the event parameters.'

View File

@ -1,7 +1,7 @@
version: 2
models:
- name: core__fact_event_logs
description: '{{ doc("bsc_logs_table_doc") }}'
description: '{{ doc("deprecation") }}'
columns:
- name: BLOCK_NUMBER
@ -15,11 +15,11 @@ models:
- name: CONTRACT_ADDRESS
description: '{{ doc("bsc_logs_contract_address") }}'
- name: CONTRACT_NAME
description: '{{ doc("bsc_logs_contract_name") }}'
description: '{{ doc("deprecation") }}'
- name: EVENT_NAME
description: '{{ doc("bsc_event_name") }}'
description: '{{ doc("deprecation") }}'
- name: EVENT_INPUTS
description: '{{ doc("bsc_event_inputs") }}'
description: '{{ doc("deprecation") }}'
- name: TOPICS
description: '{{ doc("bsc_topics") }}'
- name: DATA

View File

@ -35,7 +35,7 @@ models:
- name: STATUS
description: '{{ doc("bsc_tx_status") }}'
- name: TX_JSON
description: '{{ doc("bsc_tx_json") }}'
description: '{{ doc("deprecation") }}'
- name: INPUT_DATA
description: '{{ doc("bsc_tx_input_data") }}'
- name: ORIGIN_FUNCTION_SIGNATURE

View File

@ -9,7 +9,7 @@
WITH meta AS (
SELECT
registered_on AS job_created_time,
job_created_time,
last_modified,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
@ -18,29 +18,16 @@ WITH meta AS (
file_name
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "decoded_logs") }}'
information_schema.external_table_file_registration_history(
table_name => '{{ source( "bronze_streamline", "decoded_logs") }}',
start_time => (
SELECT
DATEADD('hour', -6, MAX(_INSERTED_TIMESTAMP))
FROM
{{ this }}
)
)
) A
WHERE
1 = 1
{% if is_incremental() %}
AND _partition_by_block_number BETWEEN (
SELECT
MAX(block_number) - 50000
FROM
{{ this }}
)
AND (
SELECT
MAX(block_number) + 500000
FROM
{{ this }}
)
{% else %}
AND _partition_by_block_number <= 8000000
{% endif %}
)
),
decoded_logs AS (
SELECT