turn on the transactions realtime model and update the ad hoc job running method (#140)

* updated

* updated
xiuy001 2023-10-16 11:14:15 -04:00 committed by GitHub
parent 215d21886b
commit 10a6697fb3
3 changed files with 3 additions and 7 deletions


@@ -46,9 +46,6 @@ jobs:
     runs-on: ubuntu-latest
     environment:
       name: workflow_${{ inputs.environment }}
-    strategy:
-      matrix:
-        command: ${{fromJson(inputs.dbt_command)}}
     steps:
       - uses: actions/checkout@v3
@@ -63,4 +60,4 @@ jobs:
           dbt deps
       - name: Run DBT Jobs
         run: |
-          ${{ matrix.command }}
+          ${{ inputs.dbt_command }}
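
With the matrix strategy removed, the template no longer expects a JSON array of commands; it runs the dbt_command input directly in a single shell step. A minimal sketch of how the relevant parts of dbt_run_template.yml fit together after this change; the workflow_call input block and the job name are assumptions, since they are not part of this diff:

# Sketch only: the workflow_call inputs below are assumed, not shown in this diff.
on:
  workflow_call:
    inputs:
      dbt_command:
        required: true
        type: string   # a plain multi-line string now, no fromJson/matrix fan-out
      environment:
        required: true
        type: string

jobs:
  dbt_run:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}
    steps:
      - uses: actions/checkout@v3
      - name: Run DBT Jobs
        # each line of the multi-line input runs sequentially in one shell
        run: |
          ${{ inputs.dbt_command }}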


@@ -27,8 +27,7 @@ jobs:
     uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
     with:
       dbt_command: |
-        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/core/realtime/streamline__tx_search_realtime.sql
+        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/core/realtime/streamline__transactions_realtime.sql
       environment: workflow_prod
       warehouse: ${{ vars.WAREHOUSE }}
     secrets: inherit
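
Because the template now runs the whole dbt_command block in one step, an ad hoc job that needs several commands can stack them in the block scalar instead of encoding a JSON array. A hypothetical caller snippet; the dbt test line is illustrative only, not part of this change:

    with:
      dbt_command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/core/realtime/streamline__transactions_realtime.sql
        dbt test -m models/streamline/core/realtime
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit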


@@ -1,7 +1,7 @@
 {{ config (
     materialized = "view",
     post_hook = if_data_call_function(
-        func = "{{this.schema}}.udf_bulk_rest_api(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'transactions', 'sql_limit', {{var('sql_limit','2000000')}}, 'producer_batch_size', {{var('producer_batch_size','200')}}, 'worker_batch_size', {{var('worker_batch_size','20')}}, 'exploded_key', '[\"txs;tx_responses\"]'))",
+        func = "{{this.schema}}.udf_bulk_rest_api(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'transactions', 'sql_limit', {{var('sql_limit','10000')}}, 'producer_batch_size', {{var('producer_batch_size','200')}}, 'worker_batch_size', {{var('worker_batch_size','20')}}, 'exploded_key', '[\"txs;tx_responses\"]'))",
         target = "{{this.schema}}.{{this.identifier}}"
     )
 ) }}
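
The model's default sql_limit drops from 2,000,000 to 10,000, but because it is read through var(), a one-off backfill can still raise it from the calling workflow by passing the variable in --vars. A hypothetical ad hoc invocation, not part of this change:

    with:
      # sql_limit passed via --vars overrides the 10000 default baked into the model
      dbt_command: |
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True,"sql_limit":2000000}' -m 1+models/streamline/core/realtime/streamline__transactions_realtime.sql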