GitHub Actions (GHA) workflow modifications: add reusable dbt run templates and update the streamline external-table macros

This commit is contained in:
Mike Stepanovic 2025-05-07 12:59:23 -06:00
parent 9ba7cd8fbf
commit 9e4ccd4fcd
9 changed files with 698 additions and 61 deletions

View File

@ -0,0 +1,64 @@
# Ad-hoc dbt runner: manually dispatched; executes an arbitrary dbt command
# against the selected environment and Snowflake warehouse.
name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}

on:
  workflow_dispatch:
    # NOTE(fix): a `branches` filter is not valid under `workflow_dispatch`
    # (it only applies to push/pull_request triggers); the run always uses
    # the ref selected in the dispatch UI, so the filter was removed.
    inputs:
      environment:
        type: choice
        description: DBT Run Environment
        required: true
        options:
          - dev
          - prod
        default: dev
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
        default: DBT
      dbt_command:
        type: string
        description: DBT Run Command
        required: true

# Snowflake connection settings consumed by profiles.yml via env_var().
env:
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

# Serialize runs of this workflow (queued, not cancelled).
concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      # SECURITY NOTE(review): interpolating a free-form input directly into
      # `run` permits shell injection by anyone allowed to dispatch this
      # workflow — acceptable only while dispatch is limited to trusted users.
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

View File

@ -0,0 +1,44 @@
# Writes the project's dbt variables to fsc_evm.admin._master_keys whenever
# return_vars.sql changes on the pre-release branch.
name: dbt_run_master_keys

on:
  push:
    branches:
      - "pre-release/v4-beta"
    paths:
      - "macros/global/variables/return_vars.sql"

# Snowflake connection settings consumed by profiles.yml via env_var().
env:
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

# Serialize runs of this workflow.
concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      # WRITE_VARS_ENABLED flips the macro from read-only to write mode.
      - name: Write Variables to fsc_evm.admin._master_keys
        run: |
          dbt run-operation return_vars --vars '{"WRITE_VARS_ENABLED": true}'

96
.github/workflows/dbt_run_template.yml vendored Normal file
View File

@ -0,0 +1,96 @@
# Reusable dbt runner: invoked via workflow_call by per-project workflows.
# Derives Snowflake connection settings from the dbt profile name and the
# requested target, then runs one (optionally two) dbt commands.
name: dbt_run_template

on:
  workflow_call:
    inputs:
      warehouse:
        required: false
        type: string
        default: DBT_CLOUD
      target:
        required: false
        type: string
        default: prod
      # NOTE(fix): GitHub ignores `default` on required workflow_call inputs,
      # so the inert defaults previously declared here were removed.
      command_name:
        required: true
        type: string
      command:
        required: true
        type: string
      command_name_2:
        required: false
        type: string
        default: Run DBT Command 2 (if enabled)
      command_2:
        required: false
        type: string
        default: ""

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_secrets
    steps:
      - uses: actions/checkout@v3
      # Parse profile/project names out of dbt_project.yml; they drive the
      # DATABASE/ROLE/USER naming convention below.
      - name: Extract project & profile names from dbt_project.yml
        id: project-name
        run: |
          PROFILE_NAME=$(grep "^profile:" dbt_project.yml | sed 's/^profile:[[:space:]]*"//' | sed 's/".*$//')
          PROJECT_NAME=$(grep "^name:" dbt_project.yml | sed 's/^name:[[:space:]]*"//' | sed 's/".*$//')
          echo "PROFILE_NAME=$PROFILE_NAME" >> $GITHUB_ENV
          echo "PROJECT_NAME=$PROJECT_NAME" >> $GITHUB_ENV
          echo "PROFILE_NAME: $PROFILE_NAME"
          echo "PROJECT_NAME: $PROJECT_NAME"
      # Target-dependent connection settings: prod and test use the profile's
      # dedicated role/database; anything else falls back to the shared dev
      # role and a *_DEV database clone.
      - name: Set environment variables
        run: |
          echo "TARGET=${{ inputs.target }}" >> $GITHUB_ENV
          echo "ACCOUNT=vna27887.us-east-1" >> $GITHUB_ENV
          echo "REGION=us-east-1" >> $GITHUB_ENV
          echo "SCHEMA=ADMIN" >> $GITHUB_ENV
          echo "USER=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
          echo "PASSWORD=${{ secrets.PASSWORD }}" >> $GITHUB_ENV
          if [[ "${{ inputs.target }}" == *"prod"* ]]; then
            echo "DATABASE=$PROFILE_NAME" >> $GITHUB_ENV
            echo "ROLE=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
            echo "WAREHOUSE=${{ inputs.warehouse }}" >> $GITHUB_ENV
          elif [[ "${{ inputs.target }}" == *"test"* ]]; then
            echo "DATABASE=$PROFILE_NAME" >> $GITHUB_ENV
            echo "ROLE=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
            echo "WAREHOUSE=DBT_TEST" >> $GITHUB_ENV
          else
            echo "DATABASE=${PROFILE_NAME}_DEV" >> $GITHUB_ENV
            echo "ROLE=INTERNAL_DEV" >> $GITHUB_ENV
            echo "WAREHOUSE=${{ inputs.warehouse }}" >> $GITHUB_ENV
          fi
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: ${{ inputs.command_name }}
        run: |
          ${{ inputs.command }}
      # Optional second command; skipped when command_2 is empty.
      - name: ${{ inputs.command_name_2 }}
        if: "${{ inputs.command_2 != '' }}"
        run: |
          ${{ inputs.command_2 }}

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,77 @@
# Reusable template that resumes or suspends ALL Snowflake GHA tasks for the
# calling repo's dbt project via the fsc_evm.alter_all_gha_tasks operation.
name: dbt_run_template_alter_all_gha_tasks

on:
  workflow_call:
    inputs:
      # Allowed values: RESUME, SUSPEND.  workflow_call inputs cannot be of
      # type `choice`, so this is free text validated downstream by the macro.
      # NOTE(fix): the previous `default: RESUME` was inert — GitHub ignores
      # defaults on required inputs — so it was removed.
      task_action:
        type: string
        description: "Action to perform on all tasks (RESUME, SUSPEND)"
        required: true
      target:
        type: string
        required: false
        default: prod

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_secrets
    steps:
      - uses: actions/checkout@v3
      # Profile/project names drive the DATABASE/ROLE/USER convention below.
      - name: Extract project & profile names from dbt_project.yml
        id: project-name
        run: |
          PROFILE_NAME=$(grep "^profile:" dbt_project.yml | sed 's/^profile:[[:space:]]*"//' | sed 's/".*$//')
          PROJECT_NAME=$(grep "^name:" dbt_project.yml | sed 's/^name:[[:space:]]*"//' | sed 's/".*$//')
          echo "PROFILE_NAME=$PROFILE_NAME" >> $GITHUB_ENV
          echo "PROJECT_NAME=$PROJECT_NAME" >> $GITHUB_ENV
          echo "PROFILE_NAME: $PROFILE_NAME"
          echo "PROJECT_NAME: $PROJECT_NAME"
      - name: Set environment variables
        run: |
          echo "TARGET=${{ inputs.target }}" >> $GITHUB_ENV
          echo "ACCOUNT=vna27887.us-east-1" >> $GITHUB_ENV
          echo "REGION=us-east-1" >> $GITHUB_ENV
          echo "SCHEMA=ADMIN" >> $GITHUB_ENV
          echo "USER=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
          echo "PASSWORD=${{ secrets.PASSWORD }}" >> $GITHUB_ENV
          if [[ "${{ inputs.target }}" == *"prod"* ]]; then
            echo "DATABASE=$PROFILE_NAME" >> $GITHUB_ENV
            echo "ROLE=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
            echo "WAREHOUSE=DBT_CLOUD" >> $GITHUB_ENV
          elif [[ "${{ inputs.target }}" == *"test"* ]]; then
            echo "DATABASE=$PROFILE_NAME" >> $GITHUB_ENV
            echo "ROLE=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
            echo "WAREHOUSE=DBT_TEST" >> $GITHUB_ENV
          else
            echo "DATABASE=${PROFILE_NAME}_DEV" >> $GITHUB_ENV
            echo "ROLE=INTERNAL_DEV" >> $GITHUB_ENV
            echo "WAREHOUSE=DBT_CLOUD" >> $GITHUB_ENV
          fi
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Alter all GHA tasks
        run: |
          dbt run-operation fsc_evm.alter_all_gha_tasks --args '{"task_action": "${{ inputs.task_action }}"}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,83 @@
# Reusable template that performs an action (e.g. RESUME/SUSPEND) on the
# single Snowflake task backing one named GHA workflow.
name: dbt_run_template_alter_gha_tasks

on:
  workflow_call:
    inputs:
      workflow_name:
        type: string
        description: "Name of the workflow to perform the action on, no .yml extension"
        required: true
      task_action:
        type: string
        description: "Action to perform"
        required: true
      target:
        type: string
        required: false
        default: prod

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_secrets
    steps:
      - uses: actions/checkout@v3
      # Profile/project names drive the DATABASE/ROLE/USER convention below.
      - name: Extract project & profile names from dbt_project.yml
        id: project-name
        run: |
          PROFILE_NAME=$(grep "^profile:" dbt_project.yml | sed 's/^profile:[[:space:]]*"//' | sed 's/".*$//')
          PROJECT_NAME=$(grep "^name:" dbt_project.yml | sed 's/^name:[[:space:]]*"//' | sed 's/".*$//')
          echo "PROFILE_NAME=$PROFILE_NAME" >> $GITHUB_ENV
          echo "PROJECT_NAME=$PROJECT_NAME" >> $GITHUB_ENV
          echo "PROFILE_NAME: $PROFILE_NAME"
          echo "PROJECT_NAME: $PROJECT_NAME"
      - name: Set environment variables
        run: |
          echo "TARGET=${{ inputs.target }}" >> $GITHUB_ENV
          echo "ACCOUNT=vna27887.us-east-1" >> $GITHUB_ENV
          echo "REGION=us-east-1" >> $GITHUB_ENV
          echo "SCHEMA=ADMIN" >> $GITHUB_ENV
          echo "USER=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
          echo "PASSWORD=${{ secrets.PASSWORD }}" >> $GITHUB_ENV
          if [[ "${{ inputs.target }}" == *"prod"* ]]; then
            echo "DATABASE=$PROFILE_NAME" >> $GITHUB_ENV
            echo "ROLE=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
            echo "WAREHOUSE=DBT_CLOUD" >> $GITHUB_ENV
          elif [[ "${{ inputs.target }}" == *"test"* ]]; then
            echo "DATABASE=$PROFILE_NAME" >> $GITHUB_ENV
            echo "ROLE=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
            echo "WAREHOUSE=DBT_TEST" >> $GITHUB_ENV
          else
            echo "DATABASE=${PROFILE_NAME}_DEV" >> $GITHUB_ENV
            echo "ROLE=INTERNAL_DEV" >> $GITHUB_ENV
            echo "WAREHOUSE=DBT_CLOUD" >> $GITHUB_ENV
          fi
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      # Snowflake task names follow the TRIGGER_<WORKFLOW_NAME> convention,
      # upper-cased here before being passed to the macro.
      - name: Set up workflow name
        run: echo "WORKFLOW_NAME_UPPER=$(echo '${{ inputs.workflow_name }}' | tr '[:lower:]' '[:upper:]')" >> $GITHUB_ENV
      - name: Run DBT Jobs
        run: |
          dbt run-operation fsc_evm.alter_gha_tasks --args '{"task_names": "TRIGGER_${{ env.WORKFLOW_NAME_UPPER }}", "task_action": "${{ inputs.task_action }}"}'

  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,109 @@
# Dev-refresh template: clones prod into the *_DEV database, then recreates
# UDFs in the fresh clone.  Two sequential jobs, each with a Slack notifier.
name: dbt_run_template_dev_refresh

on:
  workflow_call:

jobs:
  # Job 1: run the prod-clone stored procedure (connects as prod).
  run_dbt_jobs_refresh:
    runs-on: ubuntu-latest
    environment:
      name: workflow_secrets
    steps:
      - uses: actions/checkout@v3
      - name: Extract project & profile names from dbt_project.yml
        id: project-name
        run: |
          PROFILE_NAME=$(grep "^profile:" dbt_project.yml | sed 's/^profile:[[:space:]]*"//' | sed 's/".*$//')
          PROJECT_NAME=$(grep "^name:" dbt_project.yml | sed 's/^name:[[:space:]]*"//' | sed 's/".*$//')
          echo "PROFILE_NAME=$PROFILE_NAME" >> $GITHUB_ENV
          echo "PROJECT_NAME=$PROJECT_NAME" >> $GITHUB_ENV
          echo "PROFILE_NAME: $PROFILE_NAME"
          echo "PROJECT_NAME: $PROJECT_NAME"
      - name: Set production environment variables
        run: |
          echo "TARGET=prod" >> $GITHUB_ENV
          echo "ACCOUNT=vna27887.us-east-1" >> $GITHUB_ENV
          echo "REGION=us-east-1" >> $GITHUB_ENV
          echo "SCHEMA=ADMIN" >> $GITHUB_ENV
          echo "DATABASE=$PROFILE_NAME" >> $GITHUB_ENV
          echo "ROLE=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
          echo "USER=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
          echo "WAREHOUSE=DBT_CLOUD" >> $GITHUB_ENV
          echo "PASSWORD=${{ secrets.PASSWORD }}" >> $GITHUB_ENV
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run Dev Refresh
        run: |
          dbt run-operation fsc_evm.run_sp_create_prod_clone

  notify-failure:
    needs: [run_dbt_jobs_refresh]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

  # Job 2: recreate UDFs in the freshly cloned dev database.
  run_dbt_jobs_udfs:
    runs-on: ubuntu-latest
    needs: run_dbt_jobs_refresh
    environment:
      name: workflow_secrets
    steps:
      - uses: actions/checkout@v3
      - name: Extract project & profile names from dbt_project.yml
        id: project-name
        run: |
          PROFILE_NAME=$(grep "^profile:" dbt_project.yml | sed 's/^profile:[[:space:]]*"//' | sed 's/".*$//')
          PROJECT_NAME=$(grep "^name:" dbt_project.yml | sed 's/^name:[[:space:]]*"//' | sed 's/".*$//')
          echo "PROFILE_NAME=$PROFILE_NAME" >> $GITHUB_ENV
          echo "PROJECT_NAME=$PROJECT_NAME" >> $GITHUB_ENV
          echo "PROFILE_NAME: $PROFILE_NAME"
          echo "PROJECT_NAME: $PROJECT_NAME"
      - name: Set dev environment variables
        run: |
          echo "TARGET=dev" >> $GITHUB_ENV
          echo "ACCOUNT=vna27887.us-east-1" >> $GITHUB_ENV
          echo "REGION=us-east-1" >> $GITHUB_ENV
          echo "SCHEMA=ADMIN" >> $GITHUB_ENV
          echo "DATABASE=${PROFILE_NAME}_DEV" >> $GITHUB_ENV
          echo "ROLE=INTERNAL_DEV" >> $GITHUB_ENV
          echo "USER=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
          echo "WAREHOUSE=DBT" >> $GITHUB_ENV
          echo "PASSWORD=${{ secrets.PASSWORD }}" >> $GITHUB_ENV
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run Recreate UDFs
        run: |
          dbt run-operation fsc_evm.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
          dbt run -s livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev

  # NOTE(fix): with a bare `if: failure()` this job also fired when the
  # upstream refresh job failed (failure() is true for any failed ancestor),
  # duplicating notify-failure's Slack alert.  Restrict it to failures of the
  # UDF job itself.
  notify-failure-2:
    needs: [run_dbt_jobs_udfs]
    if: failure() && needs.run_dbt_jobs_udfs.result == 'failure'
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,87 @@
# Docs-update template: refreshes datashare DDL, regenerates dbt docs, and
# force-pushes the rendered site to the `docs` branch.
name: dbt_run_template_docs_update

on:
  workflow_call:

jobs:
  run_dbt_jobs_refresh:
    runs-on: ubuntu-latest
    environment:
      name: workflow_secrets
    steps:
      - uses: actions/checkout@v3
        with:
          # NOTE(fix): full history is required — the default shallow,
          # single-ref fetch does not create origin/main, which the
          # `git checkout -B docs origin/main` step below depends on.
          fetch-depth: 0
      - name: Extract project & profile names from dbt_project.yml
        id: project-name
        run: |
          PROFILE_NAME=$(grep "^profile:" dbt_project.yml | sed 's/^profile:[[:space:]]*"//' | sed 's/".*$//')
          PROJECT_NAME=$(grep "^name:" dbt_project.yml | sed 's/^name:[[:space:]]*"//' | sed 's/".*$//')
          echo "PROFILE_NAME=$PROFILE_NAME" >> $GITHUB_ENV
          echo "PROJECT_NAME=$PROJECT_NAME" >> $GITHUB_ENV
          echo "PROFILE_NAME: $PROFILE_NAME"
          echo "PROJECT_NAME: $PROJECT_NAME"
      - name: Set production environment variables
        run: |
          echo "TARGET=prod" >> $GITHUB_ENV
          echo "ACCOUNT=vna27887.us-east-1" >> $GITHUB_ENV
          echo "REGION=us-east-1" >> $GITHUB_ENV
          echo "SCHEMA=ADMIN" >> $GITHUB_ENV
          echo "DATABASE=$PROFILE_NAME" >> $GITHUB_ENV
          echo "ROLE=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
          echo "USER=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
          echo "WAREHOUSE=DBT_CLOUD" >> $GITHUB_ENV
          echo "PASSWORD=${{ secrets.PASSWORD }}" >> $GITHUB_ENV
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      # Only run the create_gold model if the project actually has it.
      - name: refresh ddl for datashare
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
      - name: checkout docs branch
        run: |
          git checkout -B docs origin/main
      - name: generate dbt docs
        run: dbt docs generate -t prod
      - name: move files to docs directory
        run: |
          mkdir -p ./docs
          cp target/{catalog.json,manifest.json,index.html} docs/
      - name: clean up target directory
        run: dbt clean
      - name: check for changes
        run: git status
      - name: stage changed files
        run: git add .
      # NOTE(fix): `git commit` exits non-zero when nothing is staged, which
      # previously failed the whole job (and fired a Slack alert) on no-op
      # doc runs — skip the commit when the index is clean.
      - name: commit changed files
        run: |
          git config user.email "abc@xyz"
          git config user.name "github-actions"
          git diff --cached --quiet || git commit -m "Auto-update docs"
      - name: push changes to docs
        run: |
          git push -f --set-upstream origin docs

  notify-failure:
    needs: [run_dbt_jobs_refresh]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -0,0 +1,84 @@
# Reusable dbt runner (single-command variant) with configurable Python
# version and dbt log/target artifact upload.
name: dbt_run_template

on:
  workflow_call:
    inputs:
      warehouse:
        required: false
        type: string
        default: DBT_CLOUD
      target:
        required: false
        type: string
        default: dev
      command:
        required: true
        type: string
      python_version:
        required: false
        type: string
        default: "3.10"

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_secrets
    steps:
      - uses: actions/checkout@v3
      # Profile/project names drive the DATABASE/ROLE/USER convention below.
      - name: Extract project & profile names from dbt_project.yml
        id: project-name
        run: |
          PROFILE_NAME=$(grep "^profile:" dbt_project.yml | sed 's/^profile:[[:space:]]*"//' | sed 's/".*$//')
          PROJECT_NAME=$(grep "^name:" dbt_project.yml | sed 's/^name:[[:space:]]*"//' | sed 's/".*$//')
          echo "PROFILE_NAME=$PROFILE_NAME" >> $GITHUB_ENV
          echo "PROJECT_NAME=$PROJECT_NAME" >> $GITHUB_ENV
          echo "PROFILE_NAME: $PROFILE_NAME"
          echo "PROJECT_NAME: $PROJECT_NAME"
      - name: Set environment variables
        run: |
          echo "TARGET=${{ inputs.target }}" >> $GITHUB_ENV
          echo "ACCOUNT=vna27887.us-east-1" >> $GITHUB_ENV
          echo "REGION=us-east-1" >> $GITHUB_ENV
          echo "SCHEMA=ADMIN" >> $GITHUB_ENV
          echo "USER=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
          echo "PASSWORD=${{ secrets.PASSWORD }}" >> $GITHUB_ENV
          if [[ "${{ inputs.target }}" == *"prod"* ]]; then
            echo "DATABASE=$PROFILE_NAME" >> $GITHUB_ENV
            echo "ROLE=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
            echo "WAREHOUSE=${{ inputs.warehouse }}" >> $GITHUB_ENV
          elif [[ "${{ inputs.target }}" == *"test"* ]]; then
            echo "DATABASE=$PROFILE_NAME" >> $GITHUB_ENV
            echo "ROLE=DBT_CLOUD_$PROFILE_NAME" >> $GITHUB_ENV
            echo "WAREHOUSE=DBT_TEST" >> $GITHUB_ENV
          else
            echo "DATABASE=${PROFILE_NAME}_DEV" >> $GITHUB_ENV
            echo "ROLE=INTERNAL_DEV" >> $GITHUB_ENV
            echo "WAREHOUSE=${{ inputs.warehouse }}" >> $GITHUB_ENV
          fi
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ inputs.python_version }}
          cache: "pip"
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.command }}
      # NOTE(fix): upload logs even when the dbt step fails — that is exactly
      # when the logs are needed; without `if: always()` this step was
      # skipped on failure.
      - name: Store logs
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: dbt-logs
          path: |
            logs
            target

View File

@ -1,77 +1,70 @@
{# Bronze external-table query: joins recently registered external-table
   files (lookback window = BRONZE_LOOKBACK_DAYS) to the raw source rows by
   file name and partition key, excluding rows whose DATA payload carries an
   error.  NOTE(review): this span was reconstructed from a garbled diff
   that interleaved the pre- and post-change lines; the post-change version
   (partition_key / inserted_timestamp naming) is emitted here — verify
   against the repository. #}
{% macro streamline_external_table_query_v2(
        model,
        partition_function
    ) %}
    {% set days = var("BRONZE_LOOKBACK_DAYS") %}
    WITH meta AS (
        SELECT
            last_modified AS inserted_timestamp,
            file_name,
            {{ partition_function }} AS partition_key
        FROM
            TABLE(
                information_schema.external_table_file_registration_history(
                    start_time => DATEADD('day', -ABS({{ days }}), CURRENT_TIMESTAMP()),
                    table_name => '{{ source( "bronze_streamline", model) }}')
            ) A
    )
    SELECT
        s.*,
        b.file_name,
        inserted_timestamp
    FROM
        {{ source(
            "bronze_streamline",
            model
        ) }}
        s
        JOIN meta b
        ON b.file_name = metadata$filename
        AND b.partition_key = s.partition_key
    WHERE
        b.partition_key = s.partition_key
        AND DATA :error IS NULL
{% endmacro %}
{# Full-refresh variant: same join as streamline_external_table_query_v2 but
   scans ALL registered external-table files (no lookback window) and takes
   inserted_timestamp from the file's registration time.  NOTE(review):
   reconstructed from a garbled diff that interleaved pre- and post-change
   lines; the post-change version is emitted — verify against the
   repository. #}
{% macro streamline_external_table_FR_query_v2(
        model,
        partition_function
    ) %}
    WITH meta AS (
        SELECT
            registered_on AS inserted_timestamp,
            file_name,
            {{ partition_function }} AS partition_key
        FROM
            TABLE(
                information_schema.external_table_files(
                    table_name => '{{ source( "bronze_streamline", model) }}'
                )
            ) A
    )
    SELECT
        s.*,
        b.file_name,
        inserted_timestamp
    FROM
        {{ source(
            "bronze_streamline",
            model
        ) }}
        s
        JOIN meta b
        ON b.file_name = metadata$filename
        AND b.partition_key = s.partition_key
    WHERE
        b.partition_key = s.partition_key
        AND DATA :error IS NULL
{% endmacro %}