# crosschain-models/dbt_project.yml

# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "crosschain_models"
version: "1.0.0"
config-version: 2
# This setting configures which "profile" dbt uses for this project.
profile: "crosschain"
# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
# found in the "models/" directory. You probably won't need to change these!
model-paths: ["models"]
analysis-paths: ["analysis"]
test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
- "target"
- "dbt_packages"
tests:
  +store_failures: true # all tests
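# +store_failures persists the failing rows of each test to a table in the warehouse
# (by default under a *_dbt_test__audit schema) so failures can be inspected after a run.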
on-run-start:
- "{{ create_sps() }}"
- '{{ create_udfs() }}'
- '{{ sp_refresh_external_table_by_recent_date() }}'
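# These on-run-start hooks run at the beginning of every dbt invocation.
# create_sps(), create_udfs() and sp_refresh_external_table_by_recent_date() are
# assumed to be macros resolved from macro-paths or an installed package; in
# comparable projects they are gated by vars such as UPDATE_UDFS_AND_SPS below.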
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models
models:
  +copy_grants: true
  +persist_docs:
    relation: true
    columns: true
  +on_schema_change: "append_new_columns"
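# Gloss on the project-wide model configs above: +copy_grants retains existing
# Snowflake grants when a relation is re-created, +persist_docs writes model and
# column descriptions to the warehouse as comments, and
# +on_schema_change: "append_new_columns" makes incremental models add newly
# introduced columns instead of silently ignoring them (dbt's default).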
# These project-level settings can be overridden in individual model files
# using the `{{ config(...) }}` macro.
vars:
"dbt_date:time_zone": GMT
STREAMLINE_INVOKE_STREAMS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
UPDATE_UDFS_AND_SPS: False
HEAL_MODEL: False
HEAL_MODELS: []
START_GHA_TASKS: False
API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
ROLES: |
["INTERNAL_DEV"]
config:
# The keys correspond to dbt profiles and are case sensitive
dev:
API_INTEGRATION: AWS_CROSSCHAIN_API_STG
EXTERNAL_FUNCTION_URI: q0bnjqvs9a.execute-api.us-east-1.amazonaws.com/stg/
prod:
API_INTEGRATION: AWS_CROSSCHAIN_API_PROD
EXTERNAL_FUNCTION_URI: y4vgsb5jk5.execute-api.us-east-1.amazonaws.com/prod/
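# Illustrative example: the Jinja lookups for API_INTEGRATION and EXTERNAL_FUNCTION_URI
# key into this config var by target name. With `dbt run --target prod`,
# var("config")["prod"] exists, so API_INTEGRATION resolves to AWS_CROSSCHAIN_API_PROD;
# a target with no entry here falls back to the "dev" values. Individual vars can also
# be overridden per invocation, e.g.:
#   dbt run --vars '{"UPDATE_UDFS_AND_SPS": True}'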