Mirror of https://github.com/FlipsideCrypto/external-models.git (synced 2026-02-06 13:57:17 +00:00)
optimize calls (#15)
This commit is contained in:
parent fa5cbfd3de
commit 2535a46b7e
.github/workflows/dbt_run_bi_hourly.yml (vendored) | 44
@@ -1,44 +0,0 @@
-name: dbt_run_scheduled_bi_hourly
-run-name: dbt_run_scheduled_bi_hourly
-
-on:
-  workflow_dispatch:
-  schedule:
-    # Runs "every 2 hours" (see https://crontab.guru)
-    - cron: '0 1-23/2 * * *'
-
-env:
-  DBT_PROFILES_DIR: ${{ vars.DBT_PROFILES_DIR }}
-
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-
-concurrency:
-  group: ${{ github.workflow }}
-
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_prod
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v1
-        with:
-          python-version: "3.7.x"
-
-      - name: install dependencies
-        run: |
-          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
-          dbt deps
-      - name: Run DBT Jobs
-        run: |
-          dbt run -m models/deepnftvalue models/silver__dates.sql --exclude models/deepnftvalue/bronze/bronze__dnv_historical_valuations.sql

.github/workflows/dbt_run_daily.yml (vendored) | 2
@@ -41,4 +41,4 @@ jobs:
           dbt deps
       - name: Run DBT Jobs
         run: |
-          dbt run -m tag:defillama
+          dbt run -m tag:defillama models/deepnftvalue models/silver__dates.sql --exclude models/deepnftvalue/bronze/bronze__dnv_historical_valuations.sql

.github/workflows/dbt_run_temp_backfill.yml (vendored) | 4
@@ -4,8 +4,8 @@ run-name: dbt_run_temp_backfill
 on:
   workflow_dispatch:
   schedule:
-    # Runs "every 5 mins" (see https://crontab.guru)
-    - cron: '*/5 * * * *'
+    # Runs "every 20 mins" (see https://crontab.guru)
+    - cron: '*/20 * * * *'
 
 env:
   DBT_PROFILES_DIR: ${{ vars.DBT_PROFILES_DIR }}

@@ -24,7 +24,7 @@ WHERE
 ORDER BY
     date_day DESC
 LIMIT
-    3
+    30
 ), api_key AS (
 SELECT
     CONCAT(
@@ -39,19 +39,43 @@ LIMIT
     ) }}
 WHERE
     api_name = 'deepnftvalue'
-)
+),
+row_nos AS (
+    SELECT
+        api_url,
+        ROW_NUMBER () over (
+            ORDER BY
+                api_url
+        ) AS row_no,
+        FLOOR(
+            row_no / 2
+        ) + 1 AS batch_no,
+        header
+    FROM
+        requests
+        JOIN api_key
+        ON 1 = 1
+),
+batched AS ({% for item in range(15) %}
 SELECT
-    ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header),{}) AS resp,
-    SYSDATE() _inserted_timestamp,
+    ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header),{}) AS resp, api_url, SYSDATE() _inserted_timestamp
+FROM
+    row_nos rn
+WHERE
+    batch_no = {{ item }}
+    AND EXISTS (
+        SELECT
+            1
+        FROM
+            row_nos
+        LIMIT
+            1) {% if not loop.last %}
+UNION ALL
+{% endif %}
+{% endfor %})
+SELECT
+    resp,
+    _inserted_timestamp,
     api_url
 FROM
-    requests
-    JOIN api_key
-    ON 1 = 1
-WHERE
-    EXISTS (
-        SELECT
-            1
-        FROM
-            requests
-    )
+    batched
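
A note on the pattern introduced above: row_nos numbers the request URLs, FLOOR(row_no / 2) + 1 maps every two rows to one batch, and the Jinja for-loop unrolls a fixed number of batches into UNION ALL branches, each calling udf_api only for its own batch. Below is a minimal, self-contained sketch of that shape; only udf_api, the column names, and the overall structure are taken from the diff, while sample_requests, the generated URLs, and range(4) are hypothetical.

-- Illustrative sketch only; sample_requests and range(4) are placeholders.
WITH sample_requests AS (
    SELECT
        'https://example.com/api/' || seq4() AS api_url,  -- hypothetical endpoints
        '{}' AS header
    FROM TABLE(GENERATOR(ROWCOUNT => 5))
),
row_nos AS (
    SELECT
        api_url,
        header,
        ROW_NUMBER() OVER (ORDER BY api_url) AS row_no,
        -- two requests per batch: row 1 -> batch 1, rows 2-3 -> batch 2, rows 4-5 -> batch 3, ...
        FLOOR(row_no / 2) + 1 AS batch_no
    FROM sample_requests
),
batched AS (
    {% for item in range(4) %}  -- unrolled at compile time into fixed UNION ALL branches
    SELECT
        ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header), {}) AS resp,
        api_url,
        SYSDATE() AS _inserted_timestamp
    FROM row_nos
    WHERE batch_no = {{ item }}
        -- mirrors the guard in the models above, presumably so an empty batch
        -- lets the branch be pruned before udf_api runs
        AND EXISTS (SELECT 1 FROM row_nos WHERE batch_no = {{ item }} LIMIT 1)
    {% if not loop.last %}UNION ALL{% endif %}
    {% endfor %}
)
SELECT resp, _inserted_timestamp, api_url
FROM batched

The two models that follow use the batch-filtered guard shown in this sketch (EXISTS ... WHERE batch_no = {{ item }}), whereas the model above only checks that row_nos is non-empty.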

@@ -22,11 +22,8 @@ WHERE
     {{ this }}
 )
 {% endif %}
-ORDER BY
-    collection_slug
-LIMIT
-    4
-), api_key AS (
+),
+api_key AS (
 SELECT
     CONCAT(
         '{\'Authorization\': \'Token ',
@@ -40,20 +37,48 @@ LIMIT
     ) }}
 WHERE
     api_name = 'deepnftvalue'
-)
+),
+row_nos AS (
+    SELECT
+        api_url,
+        collection_slug,
+        _id,
+        ROW_NUMBER() over (
+            ORDER BY
+                api_url
+        ) AS row_no,
+        FLOOR(
+            row_no / 2
+        ) + 1 AS batch_no,
+        header
+    FROM
+        requests
+        JOIN api_key
+        ON 1 = 1
+),
+batched AS ({% for item in range(11) %}
 SELECT
-    ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header),{}) AS resp,
-    SYSDATE() _inserted_timestamp,
+    ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header),{}) AS resp, _id, SYSDATE() _inserted_timestamp, collection_slug
+FROM
+    row_nos rn
+WHERE
+    batch_no = {{ item }}
+    AND EXISTS (
+        SELECT
+            1
+        FROM
+            row_nos
+        WHERE
+            batch_no = {{ item }}
+        LIMIT
+            1) {% if not loop.last %}
+UNION ALL
+{% endif %}
+{% endfor %})
+SELECT
+    resp,
+    _inserted_timestamp,
     collection_slug,
     _id
 FROM
-    requests
-    JOIN api_key
-    ON 1 = 1
-WHERE
-    EXISTS (
-        SELECT
-            1
-        FROM
-            requests
-    )
+    batched

@@ -22,11 +22,8 @@ WHERE
     {{ this }}
 )
 {% endif %}
-ORDER BY
-    collection_slug
-LIMIT
-    2
-), api_key AS (
+),
+api_key AS (
 SELECT
     CONCAT(
         '{\'Authorization\': \'Token ',
@@ -40,20 +37,48 @@ LIMIT
     ) }}
 WHERE
     api_name = 'deepnftvalue'
-)
+),
+row_nos AS (
+    SELECT
+        api_url,
+        _id,
+        collection_slug,
+        ROW_NUMBER () over (
+            ORDER BY
+                api_url
+        ) AS row_no,
+        FLOOR(
+            row_no / 2
+        ) + 1 AS batch_no,
+        header
+    FROM
+        requests
+        JOIN api_key
+        ON 1 = 1
+),
+batched AS ({% for item in range(15) %}
 SELECT
-    ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header),{}) AS resp,
-    SYSDATE() _inserted_timestamp,
+    ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header),{}) AS resp, SYSDATE() _inserted_timestamp, collection_slug, _id
+FROM
+    row_nos rn
+WHERE
+    batch_no = {{ item }}
+    AND EXISTS (
+        SELECT
+            1
+        FROM
+            row_nos
+        WHERE
+            batch_no = {{ item }}
+        LIMIT
+            1) {% if not loop.last %}
+UNION ALL
+{% endif %}
+{% endfor %})
+SELECT
+    resp,
+    _inserted_timestamp,
     collection_slug,
     _id
 FROM
-    requests
-    JOIN api_key
-    ON 1 = 1
-WHERE
-    EXISTS (
-        SELECT
-            1
-        FROM
-            requests
-    )
+    batched