init setup

Jack Forgash 2023-06-07 12:17:41 -06:00
parent 611eb9c357
commit a0fc417f39
11 changed files with 62 additions and 63 deletions

View File

@@ -22,48 +22,6 @@ concurrency:
group: ${{ github.workflow }}
jobs:
scheduled_run:
name: docs_update
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
with:
python-version: "3.7.x"
- name: install dependencies
run: |
pip install dbt-snowflake==${{ vars.DBT_VERSION }}
dbt deps
- name: checkout docs branch
run: |
git checkout -b docs origin/main
- name: generate dbt docs
run: dbt docs generate -t prod
- name: move files to docs directory
run: |
mkdir -p ./docs
cp target/{catalog.json,manifest.json,index.html} docs/
- name: clean up target directory
run: dbt clean
- name: check for changes
run: git status
- name: stage changed files
run: git add .
- name: commit changed files
run: |
git config user.email "abc@xyz"
git config user.name "github-actions"
git commit -am "Auto-update docs"
- name: push changes to docs
run: |
git push -f --set-upstream origin docs
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_docs_updates.yml@main
secrets: inherit
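For reference, the docs workflow that remains after this hunk is just the shared-template call. A sketch of the resulting tail of the file, assuming the `name`/`on`/`env` lines above the hunk are untouched:

```yml
# Sketch of the trimmed docs workflow after this change; only the lines visible
# in the hunk are certain, the configuration above it is assumed unchanged.
concurrency:
  group: ${{ github.workflow }}
jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_docs_updates.yml@main
    secrets: inherit
```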

View File

@@ -63,4 +63,4 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
${{ inputs.dbt_command }}

View File

@@ -3,9 +3,9 @@ run-name: dbt_run_dev_refresh
on:
workflow_dispatch:
# schedule:
# # Runs "at 9:00 UTC" (see https://crontab.guru)
# - cron: '0 9 * * *'
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: '0 9 * * *'
env:
USE_VARS: "${{ vars.USE_VARS }}"
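Reassembled from the hunk above, the dev-refresh workflow's trigger block now reads:

```yml
# dbt_run_dev_refresh triggers after un-commenting the schedule
on:
  workflow_dispatch:
  schedule:
    # Runs "at 9:00 UTC" (see https://crontab.guru)
    - cron: '0 9 * * *'
```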

.github/workflows/dbt_run_scheduled.yml vendored Normal file
View File

@@ -0,0 +1,35 @@
name: dbt_run_scheduled
run-name: dbt_run_scheduled
on:
workflow_dispatch:
# schedule:
# TODO - enable once pipeline created
# Runs "every hour" (see https://crontab.guru)
# - cron: '0 */1 * * *'
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
with:
dbt_command: >
dbt run
environment: workflow_prod
warehouse: ${{ vars.WAREHOUSE }}
secrets: inherit
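The hourly schedule in this new workflow stays commented out until the pipeline exists (see the TODO above). Once it is ready, enabling it would look roughly like the sketch below, with the cron expression taken from the commented lines:

```yml
# Sketch: trigger block once the TODO is resolved and the hourly schedule is enabled
on:
  workflow_dispatch:
  schedule:
    # Runs "every hour" (see https://crontab.guru)
    - cron: '0 */1 * * *'
```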

.gitignore vendored
View File

@@ -15,4 +15,6 @@ logs/
.vscode/
.env
.DS_Store
.user.yml
.user.yml
local/

View File

@@ -1,5 +1,4 @@
# Please find and replace all instances of `xyz` with your project name.
# Aurora Models
## Profile Set Up
@@ -7,7 +6,7 @@
----
```yml
xyz:
aurora:
target: dev
outputs:
dev:
@@ -17,7 +16,7 @@ xyz:
user: <USERNAME>
password: <PASSWORD>
region: <REGION>
database: xyz_DEV
database: AURORA_DEV
warehouse: <WAREHOUSE>
schema: silver
threads: 4
@@ -73,5 +72,5 @@ dbt run --var '{"UPDATE_SNOWFLAKE_TAGS":True}' -s models/core/core__fact_swaps.s
```
select *
from table(xyz.information_schema.tag_references('xyz.core.fact_blocks', 'table'));
from table(aurora.information_schema.tag_references('aurora.core.fact_blocks', 'table'));
```

View File

@@ -1,14 +1,14 @@
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "xyz_models"
name: "aurora_models"
version: "1.0.0"
config-version: 2
require-dbt-version: ">=1.4.0"
# This setting configures which "profile" dbt uses for this project.
profile: "xyz"
profile: "aurora"
# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
@@ -42,6 +42,7 @@ models:
vars:
"dbt_date:time_zone": GMT
"UPDATE_SNOWFLAKE_TAGS": TRUE
"UPDATE_UDFS_AND_SPS": FALSE
tests:
+store_failures: true # all tests
@@ -51,4 +52,4 @@ on-run-start:
- '{{create_udfs()}}'
on-run-end:
- '{{ apply_meta_as_tags(results) }}'
- '{{ apply_meta_as_tags(results) }}'

View File

@@ -1,6 +1,6 @@
{% macro create_sps() %}
{% if target.database == 'xyz' %}
{% if target.database == 'AURORA' %}
CREATE SCHEMA IF NOT EXISTS _internal;
{{ sp_create_prod_clone('_internal') }};
{% endif %}
{% endmacro %}
{% endmacro %}

View File

@@ -1,2 +1,6 @@
{% macro create_udfs() %}
{% if var("UPDATE_UDFS_AND_SPS") %}
{{- fsc_utils.create_udfs() -}}
{% endif %}
{% endif %}
{% endmacro %}
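With this change the macro only creates UDFs when the `UPDATE_UDFS_AND_SPS` var is truthy, and dbt_project.yml now defaults it to `FALSE`, so a run has to opt in explicitly. One way to do that from a caller workflow is to pass dbt's standard `--vars` flag through the template's `dbt_command` input; the override below is an illustration, not something defined in this commit:

```yml
# Sketch only: opting in to UDF creation by overriding the project default
# UPDATE_UDFS_AND_SPS: FALSE at run time via the dbt_command input.
jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
    with:
      dbt_command: >
        dbt run --vars '{"UPDATE_UDFS_AND_SPS": True}'
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
```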

View File

@@ -2,4 +2,4 @@ packages:
- package: calogica/dbt_expectations
version: [">=0.4.0", "<0.9.0"]
- git: https://github.com/FlipsideCrypto/fsc-utils.git
revision: v1.3.0
revision: v1.3.0

View File

@@ -1,4 +1,4 @@
xyz:
aurora:
target: dev
outputs:
dev:
@@ -26,4 +26,4 @@ xyz:
threads: 8
client_session_keep_alive: False
config:
send_anonymous_usage_stats: False
send_anonymous_usage_stats: False
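This hunk only shows the profile header being renamed from `xyz` to `aurora`. As a minimal sketch, and assuming the connection fields from the README example are filled from the `ACCOUNT`/`ROLE`/`USER`/`PASSWORD`/`REGION`/`DATABASE`/`WAREHOUSE`/`SCHEMA` variables the workflows export (read via dbt's `env_var()`), the dev target might look like the following; the actual field list is not part of this diff:

```yml
# Hypothetical sketch of the renamed profile; everything beyond the header and
# the trailing lines shown in the hunk is an assumption.
aurora:
  target: dev
  outputs:
    dev:
      type: snowflake                        # assumed: dbt-snowflake adapter
      account: "{{ env_var('ACCOUNT') }}"
      role: "{{ env_var('ROLE') }}"
      user: "{{ env_var('USER') }}"
      password: "{{ env_var('PASSWORD') }}"
      region: "{{ env_var('REGION') }}"      # field name taken from the README example
      database: "{{ env_var('DATABASE') }}"
      warehouse: "{{ env_var('WAREHOUSE') }}"
      schema: "{{ env_var('SCHEMA') }}"
      threads: 8
      client_session_keep_alive: False
config:
  send_anonymous_usage_stats: False
```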