streamline models and misc

Eric Laurello 2024-10-08 15:12:59 -04:00
parent 3a2d48cd0a
commit 6aad0ff961
10 changed files with 202 additions and 12 deletions

.github/workflows/dbt_docs_update.yml

@@ -0,0 +1,71 @@
name: docs_update
on:
  push:
    branches:
      - "main"
env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
  group: ${{ github.workflow }}
jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "${{ vars.PYTHON_VERSION }}"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: checkout docs branch
        run: |
          git checkout -B docs origin/main
      - name: generate dbt docs
        run: |
          dbt ls -t prod
          dbt docs generate --no-compile -t prod
      - name: move files to docs directory
        run: |
          mkdir -p ./docs
          cp target/{catalog.json,manifest.json,index.html} docs/
      - name: clean up target directory
        run: dbt clean
      - name: check for changes
        run: git status
      - name: stage changed files
        run: git add .
      - name: commit changed files
        run: |
          git config user.email "abc@eclipse"
          git config user.name "github-actions"
          git commit -am "Auto-update docs"
      - name: push changes to docs
        run: |
          git push -f --set-upstream origin docs
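For context, the ACCOUNT/ROLE/USER/PASSWORD and related variables exported above are typically read back through env_var() in the dbt profile (the profile itself is renamed to aleo at the bottom of this commit). A minimal sketch of what that profiles.yml might look like; the key layout and threads value are assumptions, not part of this commit:

aleo:
  target: prod
  outputs:
    prod:
      type: snowflake
      account: "{{ env_var('ACCOUNT') }}"
      role: "{{ env_var('ROLE') }}"
      user: "{{ env_var('USER') }}"
      password: "{{ env_var('PASSWORD') }}"
      database: "{{ env_var('DATABASE') }}"
      warehouse: "{{ env_var('WAREHOUSE') }}"
      schema: "{{ env_var('SCHEMA') }}"
      threads: 4  # assumed; tune to the warehouse size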


@@ -0,0 +1,46 @@
name: dbt_run_streamline_blocks_realtime
run-name: dbt_run_streamline_blocks_realtime
on:
  workflow_dispatch:
    branches:
      - "main"
  schedule:
    - cron: '0 * * * *'
env:
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
  group: ${{ github.workflow }}
jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "${{ vars.PYTHON_VERSION }}"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -s 2+models/streamline/silver/core/realtime/streamline__blocks_realtime.sql --vars '{STREAMLINE_INVOKE_STREAMS: True}'
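A note on the selector: in dbt's graph syntax, the 2+ prefix selects the model plus its ancestors up to two generations back, so immediate upstream models are refreshed in the same hourly run. The STREAMLINE_INVOKE_STREAMS var is presumably the switch that lets the realtime model below actually invoke the external function rather than compile as a no-op.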

.gitignore

@@ -0,0 +1,18 @@
target/
dbt_modules/
# newer versions of dbt use this directory instead of dbt_modules for test dependencies
dbt_packages/
logs/
.venv/
.python-version
# Visual Studio Code files
*/.vscode
*.code-workspace
.history/
**/.DS_Store
.vscode/
.env
.DS_Store
.user.yml


@@ -0,0 +1,44 @@
{% macro get_merge_sql(
        target,
        source,
        unique_key,
        dest_columns,
        incremental_predicates
    ) -%}
    {% set predicate_override = "" %}
    {% if incremental_predicates[0] == "dynamic_range" %}
        -- run some queries to dynamically determine the min + max of this 'input_column' in the new data
        {% set input_column = incremental_predicates[1] %}
        {% set get_limits_query %}
            SELECT
                MIN({{ input_column }}) AS lower_limit,
                MAX({{ input_column }}) AS upper_limit
            FROM
                {{ source }}
        {% endset %}
        {% set limits = run_query(get_limits_query)[0] %}
        {% set lower_limit, upper_limit = limits[0], limits[1] %}
        -- use those calculated min + max values to limit 'target' scan, to only the days with new data
        {% set predicate_override %}
            dbt_internal_dest.{{ input_column }} BETWEEN '{{ lower_limit }}' AND '{{ upper_limit }}'
        {% endset %}
    {% endif %}
    {% set predicates = [predicate_override] if predicate_override else incremental_predicates %}
    -- standard merge from here
    {% set merge_sql = dbt.get_merge_sql(
        target,
        source,
        unique_key,
        dest_columns,
        predicates
    ) %}
    {{ return(merge_sql) }}
{% endmacro %}
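This override of dbt's built-in get_merge_sql is opted into per model through the incremental_predicates config. A minimal sketch of a model header that would trigger the dynamic_range branch above (column and key names are illustrative, not taken from this commit):

{{ config(
    materialized = 'incremental',
    incremental_strategy = 'merge',
    unique_key = 'block_number',
    incremental_predicates = ['dynamic_range', 'block_number']
) }}

At merge time the macro swaps the literal predicate list for a BETWEEN filter on dbt_internal_dest.block_number, bounded by the MIN/MAX of the incoming batch, so the target scan is limited to the range that actually has new data.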


@@ -0,0 +1,8 @@
{% macro dbt_snowflake_get_tmp_relation_type(
        strategy,
        unique_key,
        language
    ) %}
    -- always table
    {{ return('table') }}
{% endmacro %}
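Stock dbt-snowflake chooses between a view and a table for the incremental temporary relation based on the strategy, unique_key, and language arguments; pinning it to 'table' here presumably trades a small extra materialization cost for predictable merge behavior against the dynamic predicates above.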


@@ -1,10 +1,10 @@
 {% macro add_database_or_schema_tags() %}
     {{ set_database_tag_value(
         'BLOCKCHAIN_NAME',
-        'aleo'
+        'ALEO'
     ) }}
     {{ set_database_tag_value(
         'BLOCKCHAIN_TYPE',
-        'IBC'
+        'ZK'
     ) }}
 {% endmacro %}


@@ -4,7 +4,7 @@
 ## **What does this documentation cover?**
 The documentation included here details the design of the aleo
-tables and views available via [Flipside Crypto.](https://flipsidecrypto.aleo/) For more information on how these models are built, please see [the github repository.](https://github.com/flipsideCrypto/aleo-models/)
+tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/flipsideCrypto/aleo-models/)
 ## **How do I use these docs?**
 The easiest way to navigate this documentation is to use the Quick Links below. These links will take you to the documentation for each table, which contains a description, a list of the columns, and other helpful information.
@@ -65,7 +65,7 @@ Note that you can also right-click on models to interactively filter and explore
 ### **More information**
-- [Flipside](https://flipsidecrypto.aleo/)
+- [Flipside](https://flipsidecrypto.xyz/)
 - [Velocity](https://app.flipsidecrypto.com/velocity?nav=Discover)
 - [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
 - [Github](https://github.com/FlipsideCrypto/aleo-models)


@@ -4,10 +4,11 @@
     func = 'streamline.udf_bulk_rest_api_v2',
     target = "{{this.schema}}.{{this.identifier}}",
     params ={ "external_table" :"blocks",
-    "sql_limit" :"10000",
-    "producer_batch_size" :"1000",
+    "sql_limit" :"3000",
+    "producer_batch_size" :"100",
     "worker_batch_size" :"100",
-    "sql_source" :"{{this.identifier}}" }
+    "sql_source" :"{{this.identifier}}",
+    "order_by_column": "block_number",}
     )
 ) }}
 -- depends_on: {{ ref('streamline__blocks_complete') }}
@@ -36,8 +37,7 @@ SELECT
     'application/json'
     ),{},
     'Vault/dev/aleo/mainnet'
-    ) AS request
+    ) AS request,
+    block_number
 FROM
     blocks
-ORDER BY
-    block_number
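The tuning here cuts each run from 10,000 queued requests in producer batches of 1,000 down to 3,000 requests in batches of 100, and the new order_by_column param hands ordering to the UDF, which is presumably why the trailing ORDER BY in the model body is dropped in the same change.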


@@ -17,3 +17,6 @@ WHERE
 FROM
     {{ ref('streamline__chainhead') }}
 )
+UNION ALL
+SELECT
+    0
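Appending UNION ALL SELECT 0 guarantees that height 0 is always present in the set of blocks to request, presumably so the genesis block can never be skipped when the range is derived from the live chainhead alone.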


@@ -1,4 +1,4 @@
-xyz:
+aleo:
   target: dev
   outputs:
     dev: