Finishing touches across many models; also introduces dim_labels

Mike Stepanovic 2024-10-15 14:01:18 -06:00
parent 73ce67a6d9
commit 8f1fb2bb9c
30 changed files with 764 additions and 287 deletions

.github/workflows/dbt_run_adhoc.yml
@@ -0,0 +1,66 @@
name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}
on:
workflow_dispatch:
inputs:
environment:
type: choice
description: DBT run environment
required: true
options:
- dev
- prod
default: dev
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: string
description: "DBT Run Command"
required: true
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}

.github/workflows/dbt_run_core.yml
@@ -0,0 +1,46 @@
name: dbt_run_core
run-name: dbt_run_core
on:
workflow_dispatch:
# schedule:
# # Runs "at minute 10, every hour" (see https://crontab.guru)
# - cron: '10 * * * *'
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m "aleo_models,tag:core"
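
For reference, the "aleo_models,tag:core" selector is an intersection: it matches models in the aleo_models package whose config also carries the core tag. A minimal sketch of a config this selector picks up (mirroring the core models later in this commit):

{{ config(
    materialized = 'incremental',
    tags = ['core']    -- selected by: dbt run -m "aleo_models,tag:core"
) }}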

.github/workflows/dbt_run_dev_refresh.yml
@@ -0,0 +1,45 @@
name: dbt_run_dev_refresh
run-name: dbt_run_dev_refresh
on:
workflow_dispatch:
schedule:
# Runs "at 5:10 UTC" (see https://crontab.guru)
- cron: '10 5 * * *'
env:
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt workstream-dbt-core
dbt deps
- name: Run DBT Jobs
run: |
dbt run-operation run_sp_create_prod_clone

@@ -1,8 +0,0 @@
{{ config (
materialized = 'view',
enabled = false
) }}
{{ streamline_external_table_query_v2(
model = "transactions",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

@@ -1,8 +0,0 @@
{{ config (
materialized = 'view',
enabled = false
) }}
{{ streamline_external_table_FR_query_v2(
model = 'transactions',
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

@@ -1,34 +0,0 @@
{{ config(
materialized = 'incremental',
unique_key = "program_id",
tags = ['core','full_test'],
enabled = false
) }}
WITH programs AS (
SELECT
program_id
FROM
{{ ref('silver__programs') }}
),
mappings AS (
SELECT
program_id,
{{ target.database }}.live.udf_api(
'GET',
'{Service}/program/' || program_id || '/mappings',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
),{},
'Vault/dev/aleo/mainnet'
) :data AS mappings
FROM
programs
)
SELECT
program_id,
mappings
FROM
mappings

@@ -0,0 +1,5 @@
{% docs address %}
The unique address referring to an entity on the blockchain.
{% enddocs %}

@@ -0,0 +1,5 @@
{% docs solution_id %}
A unique identifier for the prover solution.
{% enddocs %}

@@ -0,0 +1,36 @@
{{ config(
materialized = 'incremental',
unique_key = ['dim_labels_id'],
incremental_strategy = 'merge',
cluster_by = 'modified_timestamp::DATE',
merge_exclude_columns = ['inserted_timestamp'],
tags = ['core']
) }}
SELECT
'aleo' AS blockchain,
creator,
address,
address_name,
label_type,
label_subtype,
project_name,
{{ dbt_utils.generate_surrogate_key(
['address']
) }} AS dim_labels_id,
SYSDATE() AS inserted_timestamp,
modified_timestamp
FROM
{{ ref('silver__labels') }}
{% if is_incremental() %}
WHERE
modified_timestamp >= (
SELECT
MAX(
modified_timestamp
)
FROM
{{ this }}
)
{% endif %}

@@ -0,0 +1,52 @@
version: 2
models:
- name: core__dim_labels
description: A hand-curated table containing address names / labels for popular contracts, validators, tokens, etc.
columns:
- name: BLOCKCHAIN
description: The name of the blockchain
tests:
- not_null
- name: CREATOR
description: The name of the creator of the label
tests:
- not_null
- name: ADDRESS
description: Address that the label is for. This is the field that should be used to join other tables with labels.
tests:
- not_null
- name: ADDRESS_NAME
description: The name associated with the address
tests:
- not_null
- name: LABEL_TYPE
description: A high-level category describing the address's main function or ownership
tests:
- not_null
- name: LABEL_SUBTYPE
description: A sub-category nested within label type providing further detail
tests:
- not_null
- name: PROJECT_NAME
description: Name of the project or entity associated with the address
tests:
- not_null
- name: DIM_LABELS_ID
description: Unique identifier for each label record
tests:
- not_null
- unique
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
tests:
- not_null
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
tests:
- not_null
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- BLOCKCHAIN
- ADDRESS
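
Since ADDRESS is the documented join key, a hypothetical enrichment query (database/schema paths assumed) would look like:

SELECT
    s.solution_id,
    s.address,
    l.address_name,
    l.project_name
FROM core.fact_block_solutions s
LEFT JOIN core.dim_labels l
    ON s.address = l.address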

@@ -1,7 +1,8 @@
{{ config(
materialized = 'incremental',
unique_key = ['program_id'],
unique_key = ['dim_program_id'],
incremental_strategy = 'merge',
merge_exclude_columns = ['inserted_timestamp'],
tags = ['core','full_test']
) }}
@@ -11,7 +12,6 @@ SELECT
program_id,
edition,
program,
{# mappings, #}
verifying_keys,
{{ dbt_utils.generate_surrogate_key(
['program_id']
@@ -21,3 +21,8 @@ SELECT
'{{ invocation_id }}' AS invocation_id
FROM
{{ ref('silver__programs') }}
UNION ALL
SELECT
*
FROM
{{ ref('silver__custom_programs') }}
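
One caveat: UNION ALL with SELECT * relies on silver__custom_programs emitting columns in exactly the order selected above. An explicit column list would be more robust; a sketch, with the mapping assumed from the ephemeral model later in this commit:

SELECT
    program_id,
    edition,
    program,
    verifying_keys,
    programs_id AS dim_program_id,    -- assumed mapping; recomputing the surrogate key may be preferable
    inserted_timestamp,
    modified_timestamp,
    _invocation_id AS invocation_id
FROM {{ ref('silver__custom_programs') }}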

@@ -47,19 +47,10 @@ models:
column_type_list:
- STRING
- VARCHAR
# - name: MAPPINGS
# description: "{{ doc('mappings') }}"
# tests:
# - dbt_expectations.expect_column_to_exist
# - not_null
# - dbt_expectations.expect_column_values_to_be_in_type_list:
# column_type_list:
# - VARIANT
- name: VERIFYING_KEYS
description: "{{ doc('verifying_keys') }}"
tests:
- dbt_expectations.expect_column_to_exist
- not_null
- name: dim_program_id
description: '{{ doc("pk") }}'
tests:

@@ -1,10 +1,11 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp::DATE >= (select min(block_timestamp::DATE) from ' ~ generate_tmp_view_name(this) ~ ')'],
unique_key = "fact_block_round_batches_id",
unique_key = ['fact_block_round_batches_id'],
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
merge_exclude_columns = ['inserted_timestamp'],
cluster_by = ['block_timestamp::DATE'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(batch_id,author,committee_id,transmission_ids);",
tags = ['core', 'full_test']
) }}
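
For context, each incremental_predicates entry is appended to the merge join condition, so the compiled statement looks roughly like this (illustrative, not exact dbt output):

MERGE INTO core.fact_block_round_batches AS DBT_INTERNAL_DEST
USING <dbt tmp view> AS DBT_INTERNAL_SOURCE
    ON DBT_INTERNAL_DEST.fact_block_round_batches_id = DBT_INTERNAL_SOURCE.fact_block_round_batches_id
    AND DBT_INTERNAL_DEST.block_timestamp::DATE >= (SELECT MIN(block_timestamp::DATE) FROM <dbt tmp view>)
WHEN MATCHED THEN UPDATE SET ...    -- every column except inserted_timestamp (merge_exclude_columns)
WHEN NOT MATCHED THEN INSERT ...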

@@ -2,7 +2,7 @@ version: 2
models:
- name: core__fact_block_round_batches
description: Records of all rounds and batches for blocks that have occurred on the Aleo network, dating back to the genesis block.
description: Records of rounds and batches within blocks on the Aleo network, capturing the consensus and block production process from genesis onward.
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
@@ -83,4 +83,4 @@ models:
description: '{{ doc("modified_timestamp") }}'
tests:
- dbt_expectations.expect_column_to_exist
- not_null
- not_null

@@ -1,10 +1,11 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp::DATE >= (select min(block_timestamp::DATE) from ' ~ generate_tmp_view_name(this) ~ ')'],
unique_key = "fact_block_solutions_id",
unique_key = ['fact_block_solutions_id'],
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
merge_exclude_columns = ['inserted_timestamp'],
cluster_by = ['block_timestamp::DATE'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(solution_id,address,epoch_hash);",
tags = ['core', 'full_test']
) }}

@@ -2,7 +2,7 @@ version: 2
models:
- name: core__fact_block_solutions
description: Records of all prover solutions for each block.
description: Records of all prover solutions for each block, including block details, rewards, and solution-specific information.
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
@@ -37,62 +37,40 @@ models:
column_type_list:
- NUMBER
- name: address
# description: "{{ doc('tx_count') }}"
# tests:
# - dbt_expectations.expect_column_to_exist
# - not_null
# - dbt_expectations.expect_column_values_to_be_in_type_list:
# column_type_list:
# - STRING
description: "{{ doc('address') }}"
tests:
- dbt_expectations.expect_column_to_exist
- not_null
- name: counter
# description: "{{ doc('previous_hash') }}"
# tests:
# - dbt_expectations.expect_column_to_exist
# - not_null
description: "A counter value associated with the solution."
tests:
- dbt_expectations.expect_column_to_exist
- not_null
- name: epoch_hash
# description: "{{ doc('proving_round') }}"
# tests:
# - dbt_expectations.expect_column_to_exist
# - not_null
# - dbt_expectations.expect_column_values_to_be_in_type_list:
# column_type_list:
# - NUMBER
# - FLOAT
description: "The hash of the epoch in which the solution was found."
tests:
- dbt_expectations.expect_column_to_exist
- not_null
- name: solution_id
# description: "{{ doc('prover_rounds') }}"
# tests:
# - dbt_expectations.expect_column_to_exist
# - not_null
# - dbt_expectations.expect_column_values_to_be_in_type_list:
# column_type_list:
# - VARIANT
description: "{{ doc('solution_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- not_null
- unique
- name: target
# description: "{{ doc('coinbase_target') }}"
# tests:
# - dbt_expectations.expect_column_to_exist
# - not_null
# - dbt_expectations.expect_column_values_to_be_in_type_list:
# column_type_list:
# - NUMBER
# - FLOAT
description: "The target difficulty for the block solution."
tests:
- dbt_expectations.expect_column_to_exist
- not_null
- name: reward_raw
# description: "{{ doc('cumulative_proof_target') }}"
# tests:
# - dbt_expectations.expect_column_to_exist
# - not_null
# - dbt_expectations.expect_column_values_to_be_in_type_list:
# column_type_list:
# - NUMBER
# - FLOAT
description: "The raw value of the reward for solving the block."
tests:
- dbt_expectations.expect_column_to_exist
- not_null
- name: reward
# description: "{{ doc('cumulative_weight') }}"
# tests:
# - dbt_expectations.expect_column_to_exist
# - not_null
# - dbt_expectations.expect_column_values_to_be_in_type_list:
# column_type_list:
# - NUMBER
# - FLOAT
description: "The processed or formatted value of the reward for solving the block."
tests:
- dbt_expectations.expect_column_to_exist
- name: fact_block_solutions_id
description: '{{ doc("pk") }}'
tests:
@@ -108,4 +86,4 @@ models:
description: '{{ doc("modified_timestamp") }}'
tests:
- dbt_expectations.expect_column_to_exist
- not_null
- not_null

@@ -1,9 +1,9 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp::DATE >= (select min(block_timestamp::DATE) from ' ~ generate_tmp_view_name(this) ~ ')'],
unique_key = "fact_blocks_id",
unique_key = ['fact_blocks_id'],
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
merge_exclude_columns = ['inserted_timestamp'],
cluster_by = ['block_timestamp::DATE'],
tags = ['core', 'full_test']
) }}

@@ -67,12 +67,6 @@ models:
- FLOAT
- name: ROUNDS
description: "{{ doc('prover_rounds') }}"
# tests:
# - dbt_expectations.expect_column_to_exist
# - not_null
# - dbt_expectations.expect_column_values_to_be_in_type_list:
# column_type_list:
# - VARIANT
- name: coinbase_target
description: "{{ doc('coinbase_target') }}"
tests:

@@ -1,10 +1,11 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp::DATE >= (select min(block_timestamp::DATE) from ' ~ generate_tmp_view_name(this) ~ ')'],
unique_key = "tx_id",
unique_key = ['fact_transactions_id'],
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
merge_exclude_columns = ['inserted_timestamp'],
cluster_by = ['block_timestamp::DATE'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_id,fee_msg,execution_msg,deployment_msg,owner_msg,finalize_msg,rejected_msg);",
tags = ['core', 'full_test']
) }}
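
Note that switching unique_key from tx_id to fact_transactions_id changes how the merge matches existing rows. If fact_transactions_id is the usual single-column surrogate key (assumed definition below), the mapping is one-to-one and the change is cosmetic; even so, a --full-refresh is the safe path after any key change.

{{ dbt_utils.generate_surrogate_key(['tx_id']) }} AS fact_transactions_id    -- assumed definition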

@@ -1,10 +1,11 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp::DATE >= (select min(block_timestamp::DATE) from ' ~ generate_tmp_view_name(this) ~ ')'],
unique_key = "fact_transitions_id",
unique_key = ['fact_transitions_id'],
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
merge_exclude_columns = ['inserted_timestamp'],
cluster_by = ['block_timestamp::DATE'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_id,transition_id,program_id,function,inputs,outputs);",
tags = ['core', 'full_test']
) }}
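
The post_hook enables Snowflake search optimization for equality predicates on the listed columns; whether it took effect can be verified with a standard Snowflake command (table path assumed):

DESCRIBE SEARCH OPTIMIZATION ON core.fact_transitions;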

@@ -0,0 +1,388 @@
{{ config(
materialized = 'ephemeral'
) }}
WITH credits_aleo AS (
SELECT
0 AS deployment_block_id,
'2024-09-04 15:00:00'::TIMESTAMP AS deployment_block_timestamp,
'credits.aleo' AS program_id,
0 AS edition,
'program credits.aleo;
mapping committee:
key as address.public;
value as committee_state.public;
struct committee_state:
is_open as boolean;
commission as u8;
mapping delegated:
key as address.public;
value as u64.public;
mapping metadata:
key as address.public;
value as u32.public;
mapping bonded:
key as address.public;
value as bond_state.public;
struct bond_state:
validator as address;
microcredits as u64;
mapping unbonding:
key as address.public;
value as unbond_state.public;
struct unbond_state:
microcredits as u64;
height as u32;
mapping account:
key as address.public;
value as u64.public;
mapping withdraw:
key as address.public;
value as address.public;
record credits:
owner as address.private;
microcredits as u64.private;
function bond_validator:
input r0 as address.public;
input r1 as u64.public;
input r2 as u8.public;
assert.neq self.signer r0;
gte r1 1000000u64 into r3;
assert.eq r3 true;
gt r2 100u8 into r4;
assert.neq r4 true;
async bond_validator self.signer r0 r1 r2 into r5;
output r5 as credits.aleo/bond_validator.future;
finalize bond_validator:
input r0 as address.public;
input r1 as address.public;
input r2 as u64.public;
input r3 as u8.public;
get.or_use withdraw[r0] r1 into r4;
assert.eq r1 r4;
cast true r3 into r5 as committee_state;
get.or_use committee[r0] r5 into r6;
assert.eq r3 r6.commission;
cast r0 0u64 into r7 as bond_state;
get.or_use bonded[r0] r7 into r8;
assert.eq r8.validator r0;
add r8.microcredits r2 into r9;
cast r0 r9 into r10 as bond_state;
get.or_use delegated[r0] 0u64 into r11;
add r2 r11 into r12;
gte r12 10000000000000u64 into r13;
assert.eq r13 true;
get account[r0] into r14;
sub r14 r2 into r15;
contains committee[r0] into r16;
branch.eq r16 true to validator_in_committee;
set r4 into withdraw[r0];
gte r2 100000000u64 into r17;
assert.eq r17 true;
get.or_use metadata[aleo1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3ljyzc] 0u32 into r18;
add r18 1u32 into r19;
set r19 into metadata[aleo1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3ljyzc];
contains unbonding[r0] into r20;
assert.eq r20 false;
position validator_in_committee;
set r6 into committee[r0];
set r12 into delegated[r0];
set r10 into bonded[r0];
set r15 into account[r0];
function bond_public:
input r0 as address.public;
input r1 as address.public;
input r2 as u64.public;
gte r2 1000000u64 into r3;
assert.eq r3 true;
assert.neq self.caller r0;
async bond_public self.caller r0 r1 r2 into r4;
output r4 as credits.aleo/bond_public.future;
finalize bond_public:
input r0 as address.public;
input r1 as address.public;
input r2 as address.public;
input r3 as u64.public;
get.or_use withdraw[r0] r2 into r4;
assert.eq r2 r4;
contains bonded[r0] into r5;
branch.eq r5 true to continue_bond_delegator;
set r2 into withdraw[r0];
cast true 0u8 into r6 as committee_state;
get.or_use committee[r1] r6 into r7;
assert.eq r7.is_open true;
get.or_use metadata[aleo1qgqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqanmpl0] 0u32 into r8;
add r8 1u32 into r9;
lte r9 100000u32 into r10;
assert.eq r10 true;
set r9 into metadata[aleo1qgqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqanmpl0];
position continue_bond_delegator;
cast r1 0u64 into r11 as bond_state;
get.or_use bonded[r0] r11 into r12;
assert.eq r12.validator r1;
add r12.microcredits r3 into r13;
gte r13 10000000000u64 into r14;
assert.eq r14 true;
cast r1 r13 into r15 as bond_state;
get account[r0] into r16;
sub r16 r3 into r17;
get.or_use delegated[r1] 0u64 into r18;
add r3 r18 into r19;
contains unbonding[r1] into r20;
assert.eq r20 false;
set r15 into bonded[r0];
set r17 into account[r0];
set r19 into delegated[r1];
function unbond_public:
input r0 as address.public;
input r1 as u64.public;
async unbond_public self.caller r0 r1 into r2;
output r2 as credits.aleo/unbond_public.future;
finalize unbond_public:
input r0 as address.public;
input r1 as address.public;
input r2 as u64.public;
add block.height 360u32 into r3;
cast 0u64 r3 into r4 as unbond_state;
get bonded[r1] into r5;
get withdraw[r1] into r6;
is.eq r0 r6 into r7;
contains withdraw[r5.validator] into r8;
get.or_use withdraw[r5.validator] aleo1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3ljyzc into r9;
is.eq r0 r9 into r10;
and r8 r10 into r11;
or r7 r11 into r12;
assert.eq r12 true;
is.eq r5.validator r1 into r13;
branch.eq r13 true to unbond_validator;
get.or_use unbonding[r1] r4 into r14;
get delegated[r5.validator] into r15;
sub r5.microcredits r2 into r16;
lt r16 10000000000u64 into r17;
or r11 r17 into r18;
ternary r18 r5.microcredits r2 into r19;
add r14.microcredits r19 into r20;
cast r20 r3 into r21 as unbond_state;
set r21 into unbonding[r1];
sub r15 r19 into r22;
set r22 into delegated[r5.validator];
branch.eq r18 true to remove_delegator;
cast r5.validator r16 into r23 as bond_state;
set r23 into bonded[r1];
branch.eq true true to end_unbond_delegator;
position remove_delegator;
remove bonded[r1];
get metadata[aleo1qgqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqanmpl0] into r24;
sub r24 1u32 into r25;
set r25 into metadata[aleo1qgqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqanmpl0];
position end_unbond_delegator;
gte r22 10000000000000u64 into r26;
branch.eq r26 true to end;
position unbond_validator;
contains committee[r5.validator] into r27;
nor r13 r27 into r28;
branch.eq r28 true to end;
get committee[r5.validator] into r29;
get bonded[r5.validator] into r30;
get delegated[r5.validator] into r31;
lt r31 10000000000000u64 into r32;
branch.eq r32 true to remove_validator;
sub r31 r2 into r33;
sub r30.microcredits r2 into r34;
gte r34 100000000u64 into r35;
gte r33 10000000000000u64 into r36;
and r35 r36 into r37;
branch.eq r37 false to remove_validator;
get.or_use unbonding[r5.validator] r4 into r38;
add r38.microcredits r2 into r39;
cast r39 r3 into r40 as unbond_state;
set r40 into unbonding[r5.validator];
set r33 into delegated[r5.validator];
cast r5.validator r34 into r41 as bond_state;
set r41 into bonded[r5.validator];
branch.eq true true to end;
position remove_validator;
remove committee[r5.validator];
get metadata[aleo1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3ljyzc] into r42;
sub r42 1u32 into r43;
set r43 into metadata[aleo1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3ljyzc];
sub r31 r30.microcredits into r44;
set r44 into delegated[r5.validator];
remove bonded[r5.validator];
get.or_use unbonding[r5.validator] r4 into r45;
add r30.microcredits r45.microcredits into r46;
cast r46 r3 into r47 as unbond_state;
set r47 into unbonding[r5.validator];
position end;
function claim_unbond_public:
input r0 as address.public;
async claim_unbond_public r0 into r1;
output r1 as credits.aleo/claim_unbond_public.future;
finalize claim_unbond_public:
input r0 as address.public;
get unbonding[r0] into r1;
gte block.height r1.height into r2;
assert.eq r2 true;
get withdraw[r0] into r3;
get.or_use account[r3] 0u64 into r4;
add r1.microcredits r4 into r5;
set r5 into account[r3];
remove unbonding[r0];
contains bonded[r0] into r6;
branch.eq r6 true to end;
remove withdraw[r0];
position end;
function set_validator_state:
input r0 as boolean.public;
async set_validator_state self.caller r0 into r1;
output r1 as credits.aleo/set_validator_state.future;
finalize set_validator_state:
input r0 as address.public;
input r1 as boolean.public;
get committee[r0] into r2;
cast r1 r2.commission into r3 as committee_state;
set r3 into committee[r0];
function transfer_public:
input r0 as address.public;
input r1 as u64.public;
async transfer_public self.caller r0 r1 into r2;
output r2 as credits.aleo/transfer_public.future;
finalize transfer_public:
input r0 as address.public;
input r1 as address.public;
input r2 as u64.public;
get account[r0] into r3;
sub r3 r2 into r4;
set r4 into account[r0];
get.or_use account[r1] 0u64 into r5;
add r5 r2 into r6;
set r6 into account[r1];
function transfer_public_as_signer:
input r0 as address.public;
input r1 as u64.public;
async transfer_public_as_signer self.signer r0 r1 into r2;
output r2 as credits.aleo/transfer_public_as_signer.future;
finalize transfer_public_as_signer:
input r0 as address.public;
input r1 as address.public;
input r2 as u64.public;
get account[r0] into r3;
sub r3 r2 into r4;
set r4 into account[r0];
get.or_use account[r1] 0u64 into r5;
add r5 r2 into r6;
set r6 into account[r1];
function transfer_private:
input r0 as credits.record;
input r1 as address.private;
input r2 as u64.private;
sub r0.microcredits r2 into r3;
cast r1 r2 into r4 as credits.record;
cast r0.owner r3 into r5 as credits.record;
output r4 as credits.record;
output r5 as credits.record;
function transfer_private_to_public:
input r0 as credits.record;
input r1 as address.public;
input r2 as u64.public;
sub r0.microcredits r2 into r3;
cast r0.owner r3 into r4 as credits.record;
async transfer_private_to_public r1 r2 into r5;
output r4 as credits.record;
output r5 as credits.aleo/transfer_private_to_public.future;
finalize transfer_private_to_public:
input r0 as address.public;
input r1 as u64.public;
get.or_use account[r0] 0u64 into r2;
add r1 r2 into r3;
set r3 into account[r0];
function transfer_public_to_private:
input r0 as address.private;
input r1 as u64.public;
cast r0 r1 into r2 as credits.record;
async transfer_public_to_private self.caller r1 into r3;
output r2 as credits.record;
output r3 as credits.aleo/transfer_public_to_private.future;
finalize transfer_public_to_private:
input r0 as address.public;
input r1 as u64.public;
get account[r0] into r2;
sub r2 r1 into r3;
set r3 into account[r0];
function join:
input r0 as credits.record;
input r1 as credits.record;
add r0.microcredits r1.microcredits into r2;
cast r0.owner r2 into r3 as credits.record;
output r3 as credits.record;
function split:
input r0 as credits.record;
input r1 as u64.private;
sub r0.microcredits r1 into r2;
sub r2 10000u64 into r3;
cast r0.owner r1 into r4 as credits.record;
cast r0.owner r3 into r5 as credits.record;
output r4 as credits.record;
output r5 as credits.record;
function fee_private:
input r0 as credits.record;
input r1 as u64.public;
input r2 as u64.public;
input r3 as field.public;
assert.neq r1 0u64;
assert.neq r3 0field;
add r1 r2 into r4;
sub r0.microcredits r4 into r5;
cast r0.owner r5 into r6 as credits.record;
output r6 as credits.record;
function fee_public:
input r0 as u64.public;
input r1 as u64.public;
input r2 as field.public;
assert.neq r0 0u64;
assert.neq r2 0field;
add r0 r1 into r3;
async fee_public self.signer r3 into r4;
output r4 as credits.aleo/fee_public.future;
finalize fee_public:
input r0 as address.public;
input r1 as u64.public;
get account[r0] into r2;
sub r2 r1 into r3;
set r3 into account[r0];' AS program,
NULL AS verifying_keys,
{{ dbt_utils.generate_surrogate_key(
['program_id','edition']
) }} AS programs_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
)
SELECT * FROM credits_aleo
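
Because this model is ephemeral, dbt builds no table for it; each downstream ref('silver__custom_programs') is instead inlined as a CTE into the consuming model's compiled SQL, roughly:

WITH __dbt__cte__silver__custom_programs AS (
    -- body of this model, injected at compile time
    ...
)
SELECT * FROM __dbt__cte__silver__custom_programs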

@@ -32,33 +32,20 @@ FROM
WITH base AS (
SELECT
DATA,
-- IN FINAL
DATA :header AS header,
-- IN FINAL
header :metadata :height :: INT AS block_id,
-- IN FINAL
header :metadata :timestamp :: datetime AS block_timestamp,
DATA :block_hash :: STRING AS block_hash,
DATA :previous_hash :: STRING AS previous_hash,
-- IN FINAL
COALESCE(ARRAY_SIZE(DATA :transactions) :: NUMBER, 0) AS tx_count,
-- IN FINAL
header :metadata :network AS network_id,
header :metadata :coinbase_target :: bigint AS coinbase_target,
header :metadata :cumulative_proof_target :: bigint AS cumulative_proof_target,
header :metadata :cumulative_weight :: bigint AS cumulative_weight,
-- IN FINAL
header :metadata :round :: INT AS ROUND,
-- use to identify address of block producer (validator) -- IN FINAL
object_keys(
DATA :authority :subdag :subdag
) AS rounds,
-- IN FINAL, REPLACES PROPOSER ADDRESS
{# DATA :transactions AS transactions,
-- IN FINAL
DATA :ratifications AS block_rewards,
-- puzzle rewards (provers) and staker rewards (block reward). puzzle rewards are split by weight -- IN FINAL
DATA :solutions :solutions :solutions AS puzzle_solutions -- target is the proportion of prover rewards #}
) AS rounds
FROM
{% if is_incremental() %}

@@ -0,0 +1,23 @@
{{ config(
materialized = 'view',
tags = ['core']
) }}
SELECT
system_created_at,
insert_date,
blockchain,
address,
creator,
label_type,
label_subtype,
address_name,
project_name,
modified_timestamp
FROM
{{ source(
'crosschain',
'dim_labels'
) }}
WHERE
blockchain = 'aleo'

@@ -46,7 +46,6 @@ SELECT
TRY_PARSE_JSON(
deployment_msg :verifying_keys
) AS verifying_keys,
{# mappings, #}
{{ dbt_utils.generate_surrogate_key(
['program_id','edition']
) }} AS programs_id,

@@ -0,0 +1,40 @@
version: 2
models:
- name: silver__programs
description: "This model contains detailed information about programs deployed on the Aleo blockchain."
columns:
- name: deployment_block_id
description: "The ID of the block in which the program was deployed."
tests:
- not_null
- name: deployment_block_timestamp
description: "The timestamp of the block in which the program was deployed."
tests:
- not_null
- name: program_id
description: "The unique identifier of the program."
tests:
- not_null
- unique
- name: edition
description: "The version or edition number of the program."
tests:
- not_null
- name: program
description: "The full source code of the program."
tests:
- not_null
- name: verifying_keys
description: "The verifying keys associated with the program, if any."
- name: programs_id
description: "A unique identifier for each program record."
tests:
- not_null
- unique

@@ -1,6 +1,6 @@
{{ config(
materialized = 'incremental',
unique_key = "tx_id",
unique_key = "transactions_id",
incremental_strategy = 'merge',
incremental_predicates = ["COALESCE(DBT_INTERNAL_DEST.block_timestamp::DATE,'2099-12-31') >= (select min(block_timestamp::DATE) from " ~ generate_tmp_view_name(this) ~ ")"],
merge_exclude_columns = ["inserted_timestamp"],

@@ -1,6 +1,6 @@
{{ config(
materialized = 'incremental',
unique_key = "transition_id",
unique_key = "transitions_id",
incremental_strategy = 'merge',
incremental_predicates = ["COALESCE(DBT_INTERNAL_DEST.block_timestamp::DATE,'2099-12-31') >= (select min(block_timestamp::DATE) from " ~ generate_tmp_view_name(this) ~ ")"],
merge_exclude_columns = ["inserted_timestamp"],

@@ -5,16 +5,13 @@ sources:
database: "{{ 'crosschain' if target.database == 'aleo' else 'crosschain_dev' }}"
schema: core
tables:
- name: dim_date_hours
- name: address_tags
- name: dim_dates
- name: dim_labels
- name: crosschain_silver
database: "{{ 'crosschain' if target.database == 'ALEO' else 'crosschain_dev' }}"
schema: silver
tables:
- name: number_sequence
- name: labels_combined
- name: bronze_streamline
database: streamline
@@ -23,6 +20,7 @@ sources:
tables:
- name: blocks
- name: transactions
- name: github_actions
database: ALEO
schema: github_actions

@@ -1,55 +0,0 @@
{{ config (
materialized = "incremental",
incremental_strategy = 'merge',
unique_key = "complete_transactions_id",
enabled = false
) }}
-- depends_on: {{ ref('bronze__transactions') }}
WITH transactions AS (
SELECT
VALUE :BLOCK_ID_REQUESTED :: INT AS block_id,
DATA :id :: STRING AS transaction_id,
{{ dbt_utils.generate_surrogate_key(
['VALUE:BLOCK_ID_REQUESTED :: INT', 'DATA :id :: STRING']
) }} AS complete_transactions_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
file_name,
'{{ invocation_id }}' AS _invocation_id,
ROW_NUMBER() over (
PARTITION BY DATA :id
ORDER BY
inserted_timestamp DESC
) AS rn
FROM
{% if is_incremental() %}
{{ ref('bronze__transactions') }}
{% else %}
{{ ref('bronze__transactions_FR') }}
{% endif %}
{% if is_incremental() %}
WHERE
inserted_timestamp >= (
SELECT
MAX(modified_timestamp)
FROM
{{ this }}
)
{% endif %}
)
SELECT
block_id,
transaction_id,
complete_transactions_id,
inserted_timestamp,
modified_timestamp,
file_name,
_invocation_id
FROM
transactions
WHERE
rn = 1
AND block_id IS NOT NULL

@@ -1,80 +0,0 @@
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"transactions",
"sql_limit" :"500",
"producer_batch_size" :"100",
"worker_batch_size" :"100",
"sql_source" :"{{this.identifier}}" }
),
enabled = false
) }}
-- depends_on: {{ ref('streamline__transactions_complete') }}
WITH blocks AS (
SELECT
block_id,
block_timestamp,
transactions,
tx_count
FROM
{{ ref("silver__blocks") }}
WHERE
tx_count > 0
),
transaction_ids AS (
SELECT
b.block_id,
b.block_timestamp,
t.value :transaction :id :: STRING AS transaction_id
FROM
blocks b,
TABLE(FLATTEN(PARSE_JSON(transactions))) t
WHERE
t.value :transaction :id IS NOT NULL),
tx_to_pull AS (
SELECT
A.*
FROM
transaction_ids A
LEFT JOIN {{ ref('streamline_transactions_complete') }}
) (
SELECT
block_id,
block_timestamp,
transaction_id,
{{ target.database }}.live.udf_api(
'GET',
'{Service}/transaction/' || transaction_id,
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
),{},
'Vault/dev/aleo/mainnet'
) AS request,
block_id AS block_id_requested
FROM
transaction_ids
) b
SELECT
ROUND(
block_id,
-4
) :: INT AS partition_key,
block_timestamp,
{{ target.database }}.live.udf_api(
'GET',
'{Service}/transaction/' || transaction_id,
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
),{},
'Vault/dev/aleo/mainnet'
) AS request,
block_id AS block_id_requested
FROM
transaction_ids
ORDER BY
block_id