Compare commits


8 Commits

Author SHA1 Message Date
Jim Myers 6d20b1c0cc Update tokens.txt 2025-01-26 15:58:47 -05:00
Jim Myers 4af0353a24 Bump python to version 2.1.0 2025-01-26 15:46:05 -05:00
Jim Myers ae761cdf65 Merge pull request #39 from FlipsideCrypto/paul-bump-requests-version (bump python package versions) 2025-01-26 15:38:00 -05:00
Paul Mikulskis 751f1adc70 Enhance query integration and model definitions with optional fields and improved defaults 2025-01-23 10:08:11 -05:00
Paul Mikulskis 2a5e4c6036 update python CI 2025-01-23 09:16:01 -05:00
Paul Mikulskis e147cf8dd4 bump package versions 2025-01-23 09:13:48 -05:00
Don Cote 43a3044883 Merge pull request #37 from FlipsideCrypto/update-readme (update readme links and sql example) 2024-07-18 13:27:07 -04:00
Don Cote d1393c6a4c update readme links and sql example 2024-05-16 15:42:55 -04:00
30 changed files with 542 additions and 512 deletions

View File

@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
matrix: matrix:
python-version: ["3.7", "3.8", "3.9", "3.10"] python-version: ["3.8", "3.9", "3.10"]
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3

.gitignore vendored
View File

@ -19,6 +19,7 @@ node_modules
.output .output
build/ build/
*.egg-info/ *.egg-info/
.history/
/build/ /build/
/public/build /public/build
@ -32,3 +33,6 @@ r/shroomDK_0.1.0.tar.gz
python-sdk-example.py python-sdk-example.py
r/shroomDK/api_key.txt r/shroomDK/api_key.txt
r/shroomDK/test_of_page2_issue.R r/shroomDK/test_of_page2_issue.R
python/venv/
venv/
tokens.txt

View File

@ -5,20 +5,22 @@ Programmatic access to the most reliable & comprehensive blockchain data in Web3
You've found yourself at the FlipsideCrypto SDK repository, the official SDK to programmatically query all of Flipside Crypto's data. You've found yourself at the FlipsideCrypto SDK repository, the official SDK to programmatically query all of Flipside Crypto's data.
## 🧩 The Data ## 🧩 The Data
Flipside Crypto's Analytics Team has curated dozens of blockchain data sets with more being added each week. All tables available to query in Flipside's [Data Studio](https://flipsidecrypto.xyz) can be queried programmatically via our API and library of SDKs. Flipside Crypto's Analytics Team has curated dozens of blockchain data sets with more being added each week. All tables available to query in Flipside's [Data Studio](https://flipsidecrypto.xyz) can be queried programmatically via our API and library of SDKs.
## 📖 Official Docs ## 📖 Official Docs
[https://docs.flipsidecrypto.com/flipside-api/get-started](https://docs.flipsidecrypto.com/flipside-api/get-started) [https://docs.flipsidecrypto.com/flipside-api/get-started](https://docs.flipsidecrypto.com/flipside-api/get-started)
## 🗝 Want access? Generate an API Key for Free ## 🗝 Want access? Generate an API Key for Free
Get your [free API key here](https://flipsidecrypto.xyz/account/api-keys) Get your [free API key here](https://flipsidecrypto.xyz/api-keys)
<br> <br>
## SDKs ## SDKs
| Language | Version | Status | | Language | Version | Status |
| ------------------------ | ------- | ---------------------------------------------------------------------------------- | | ------------------------ | ------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| ✅ [Python](./python/) | 2.0.8 | [![tests](https://github.com/FlipsideCrypto/sdk/actions/workflows/ci_python.yml/badge.svg)](https://github.com/FlipsideCrypto/sdk/actions/workflows/ci_python.yml) | | ✅ [Python](./python/) | 2.1.0 | [![tests](https://github.com/FlipsideCrypto/sdk/actions/workflows/ci_python.yml/badge.svg)](https://github.com/FlipsideCrypto/sdk/actions/workflows/ci_python.yml) |
| ✅ [JS/TypeScript](./js) | 2.0.1 | [![tests](https://github.com/FlipsideCrypto/sdk/actions/workflows/ci_js.yml/badge.svg)](https://github.com/FlipsideCrypto/sdk/actions/workflows/ci_js.yml) | ✅ [JS/TypeScript](./js) | 2.0.1 | [![tests](https://github.com/FlipsideCrypto/sdk/actions/workflows/ci_js.yml/badge.svg)](https://github.com/FlipsideCrypto/sdk/actions/workflows/ci_js.yml) |
| ✅ [R](./r/shroomDK/) | 0.2.2 | [Available on CRAN](https://cran.r-project.org/web/packages/shroomDK/shroomDK.pdf) | | ✅ [R](./r/shroomDK/) | 0.2.2 | [Available on CRAN](https://cran.r-project.org/web/packages/shroomDK/shroomDK.pdf) |

View File

@ -1,126 +1,126 @@
{ {
"cells": [ "cells": [
{ {
"attachments": {}, "attachments": {},
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"# Intro to Flipside SDK/API: Getting Started\n", "# Intro to Flipside SDK/API: Getting Started\n",
"\n", "\n",
"<em>install Flipside with pip</em><br/>\n", "<em>install Flipside with pip</em><br/>\n",
"`pip install flipside`" "`pip install flipside`"
] ]
}, },
{ {
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"Import the package" "Import the package"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"from flipside import Flipside" "from flipside import Flipside"
] ]
}, },
{ {
"attachments": {}, "attachments": {},
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"Run your first query<br/>\n", "Run your first query<br/>\n",
"<em>Remember to copy/paste your API Key from https://flipsidecrypto.xyz/account/api-keys below.</em>" "<em>Remember to copy/paste your API Key from https://flipsidecrypto.xyz/api-keys below.</em>"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"import os\n", "import os\n",
"YOUR_API_KEY = os.environ.get(\"FLIPSIDE_API_KEY\")\n", "YOUR_API_KEY = os.environ.get(\"FLIPSIDE_API_KEY\")\n",
"\n", "\n",
"# Invoke the ShroomDK class to create an instance of the SDK\n", "# Invoke the ShroomDK class to create an instance of the SDK\n",
"sdk = Flipside(YOUR_API_KEY)\n", "sdk = Flipside(YOUR_API_KEY)\n",
"\n", "\n",
"# Run a query\n", "# Run a query\n",
"query_result_set = sdk.query(\"\"\"\n", "query_result_set = sdk.query(\"\"\"\n",
" SELECT * FROM ethereum.core.ez_eth_transfers \n", " SELECT * FROM ethereum.core.ez_eth_transfers \n",
" WHERE \n", " WHERE \n",
" block_timestamp > GETDATE() - interval'90 days'\n", " block_timestamp > GETDATE() - interval'90 days'\n",
" AND \n", " AND \n",
" (eth_from_address = lower('0xc2f41b3a1ff28fd2a6eee76ee12e51482fcfd11f')\n", " (eth_from_address = lower('0xc2f41b3a1ff28fd2a6eee76ee12e51482fcfd11f')\n",
" OR eth_to_address = lower('0xc2f41b3a1ff28fd2a6eee76ee12e51482fcfd11f'))\n", " OR eth_to_address = lower('0xc2f41b3a1ff28fd2a6eee76ee12e51482fcfd11f'))\n",
"\"\"\")" "\"\"\")"
] ]
}, },
{ {
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"### Query Result Set\n", "### Query Result Set\n",
"\n", "\n",
"```python\n", "```python\n",
"class QueryResultSet(BaseModel):\n", "class QueryResultSet(BaseModel):\n",
" query_id: Union[str, None] = Field(None, description=\"The server id of the query\")\n", " query_id: Union[str, None] = Field(None, description=\"The server id of the query\")\n",
" status: str = Field(False, description=\"The status of the query (`PENDING`, `FINISHED`, `ERROR`)\")\n", " status: str = Field(False, description=\"The status of the query (`PENDING`, `FINISHED`, `ERROR`)\")\n",
" columns: Union[List[str], None] = Field(None, description=\"The names of the columns in the result set\")\n", " columns: Union[List[str], None] = Field(None, description=\"The names of the columns in the result set\")\n",
" column_types: Union[List[str], None] = Field(None, description=\"The type of the columns in the result set\")\n", " column_types: Union[List[str], None] = Field(None, description=\"The type of the columns in the result set\")\n",
" rows: Union[List[Any], None] = Field(None, description=\"The results of the query\")\n", " rows: Union[List[Any], None] = Field(None, description=\"The results of the query\")\n",
" run_stats: Union[QueryRunStats, None] = Field(\n", " run_stats: Union[QueryRunStats, None] = Field(\n",
" None,\n", " None,\n",
" description=\"Summary stats on the query run (i.e. the number of rows returned, the elapsed time, etc)\",\n", " description=\"Summary stats on the query run (i.e. the number of rows returned, the elapsed time, etc)\",\n",
" )\n", " )\n",
" records: Union[List[Any], None] = Field(None, description=\"The results of the query transformed as an array of objects\")\n", " records: Union[List[Any], None] = Field(None, description=\"The results of the query transformed as an array of objects\")\n",
" error: Any\n", " error: Any\n",
"\n", "\n",
"```" "```"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"## Explore the result set object\n", "## Explore the result set object\n",
"\n", "\n",
"records = query_result_set.records\n", "records = query_result_set.records\n",
"\n", "\n",
"print(records[0])" "print(records[0])"
] ]
} }
], ],
"metadata": { "metadata": {
"kernelspec": { "kernelspec": {
"display_name": "Python 3.10.1 64-bit", "display_name": "Python 3.10.1 64-bit",
"language": "python", "language": "python",
"name": "python3" "name": "python3"
}, },
"language_info": { "language_info": {
"codemirror_mode": { "codemirror_mode": {
"name": "ipython", "name": "ipython",
"version": 3 "version": 3
}, },
"file_extension": ".py", "file_extension": ".py",
"mimetype": "text/x-python", "mimetype": "text/x-python",
"name": "python", "name": "python",
"nbconvert_exporter": "python", "nbconvert_exporter": "python",
"pygments_lexer": "ipython3", "pygments_lexer": "ipython3",
"version": "3.10.1" "version": "3.10.1"
}, },
"orig_nbformat": 4, "orig_nbformat": 4,
"vscode": { "vscode": {
"interpreter": { "interpreter": {
"hash": "949777d72b0d2535278d3dc13498b2535136f6dfe0678499012e853ee9abcab1" "hash": "949777d72b0d2535278d3dc13498b2535136f6dfe0678499012e853ee9abcab1"
} }
} }
}, },
"nbformat": 4, "nbformat": 4,
"nbformat_minor": 2 "nbformat_minor": 2
} }

View File

@ -1,290 +1,290 @@
{ {
"cells": [ "cells": [
{ {
"attachments": {}, "attachments": {},
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"# Intro to Flipside API/SDK: Getting Started\n", "# Intro to Flipside API/SDK: Getting Started\n",
"\n", "\n",
"<em>install Flipside with pip</em><br/>\n", "<em>install Flipside with pip</em><br/>\n",
"`pip install flipside`" "`pip install flipside`"
] ]
}, },
{ {
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"Import the package" "Import the package"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"from flipside import Flipside" "from flipside import Flipside"
] ]
}, },
{ {
"attachments": {}, "attachments": {},
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"Run your first query<br/>\n", "Run your first query<br/>\n",
"<em>Remember to copy/paste your API Key from https://flipsidecrypto.xyz/account/api-keys below.</em>" "<em>Remember to copy/paste your API Key from https://flipsidecrypto.xyz/api-keys below.</em>"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"import os\n", "import os\n",
"YOUR_API_KEY = os.environ.get(\"FLIPSIDE_API_KEY\")\n", "YOUR_API_KEY = os.environ.get(\"FLIPSIDE_API_KEY\")\n",
"\n", "\n",
"# Invoke the ShroomDK class to create an instance of the SDK\n", "# Invoke the ShroomDK class to create an instance of the SDK\n",
"sdk = Flipside(YOUR_API_KEY)\n", "sdk = Flipside(YOUR_API_KEY)\n",
"\n", "\n",
"# Run a query\n", "# Run a query\n",
"xMETRIC_contract_address = '0x15848C9672e99be386807b9101f83A16EB017bb5'\n", "xMETRIC_contract_address = '0x15848C9672e99be386807b9101f83A16EB017bb5'\n",
"\n", "\n",
"query_result_set = sdk.query(f\"\"\"\n", "query_result_set = sdk.query(f\"\"\"\n",
" SELECT count(distinct to_address) as recipient_count\n", " SELECT count(distinct to_address) as recipient_count\n",
" FROM polygon.core.fact_token_transfers\n", " FROM polygon.core.fact_token_transfers\n",
" WHERE block_timestamp > '2022-07-10T00:00:00'\n", " WHERE block_timestamp > '2022-07-10T00:00:00'\n",
" AND contract_address = lower('{xMETRIC_contract_address}')\n", " AND contract_address = lower('{xMETRIC_contract_address}')\n",
" AND to_address != lower('0x4b8923746a1D9943bbd408F477572762801efE4d')\n", " AND to_address != lower('0x4b8923746a1D9943bbd408F477572762801efE4d')\n",
"\"\"\")\n" "\"\"\")\n"
] ]
}, },
{ {
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"### Query Result Set\n", "### Query Result Set\n",
"\n", "\n",
"```python\n", "```python\n",
"class QueryResultSet(BaseModel):\n", "class QueryResultSet(BaseModel):\n",
" query_id: Union[str, None] = Field(None, description=\"The server id of the query\")\n", " query_id: Union[str, None] = Field(None, description=\"The server id of the query\")\n",
" status: str = Field(False, description=\"The status of the query (`PENDING`, `FINISHED`, `ERROR`)\")\n", " status: str = Field(False, description=\"The status of the query (`PENDING`, `FINISHED`, `ERROR`)\")\n",
" columns: Union[List[str], None] = Field(None, description=\"The names of the columns in the result set\")\n", " columns: Union[List[str], None] = Field(None, description=\"The names of the columns in the result set\")\n",
" column_types: Union[List[str], None] = Field(None, description=\"The type of the columns in the result set\")\n", " column_types: Union[List[str], None] = Field(None, description=\"The type of the columns in the result set\")\n",
" rows: Union[List[Any], None] = Field(None, description=\"The results of the query\")\n", " rows: Union[List[Any], None] = Field(None, description=\"The results of the query\")\n",
" run_stats: Union[QueryRunStats, None] = Field(\n", " run_stats: Union[QueryRunStats, None] = Field(\n",
" None,\n", " None,\n",
" description=\"Summary stats on the query run (i.e. the number of rows returned, the elapsed time, etc)\",\n", " description=\"Summary stats on the query run (i.e. the number of rows returned, the elapsed time, etc)\",\n",
" )\n", " )\n",
" records: Union[List[Any], None] = Field(None, description=\"The results of the query transformed as an array of objects\")\n", " records: Union[List[Any], None] = Field(None, description=\"The results of the query transformed as an array of objects\")\n",
" error: Any\n", " error: Any\n",
"\n", "\n",
"```" "```"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"## Explore the result set object\n", "## Explore the result set object\n",
"\n", "\n",
"records = query_result_set.records\n", "records = query_result_set.records\n",
"\n", "\n",
"print(records[0])\n", "print(records[0])\n",
"\n", "\n",
"print(f\"There are {records[0]['recipient_count']} unique recipients of xMETRIC tokens.\")" "print(f\"There are {records[0]['recipient_count']} unique recipients of xMETRIC tokens.\")"
] ]
}, },
{ {
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"### xMETRIC Leaderboard\n", "### xMETRIC Leaderboard\n",
"Retrieve the balance of every xMETRIC holder" "Retrieve the balance of every xMETRIC holder"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": { "metadata": {
"vscode": { "vscode": {
"languageId": "sql" "languageId": "sql"
} }
}, },
"outputs": [], "outputs": [],
"source": [ "source": [
"WITH sent_tokens AS (\n", "WITH sent_tokens AS (\n",
" SELECT \n", " SELECT \n",
" to_address as Participant,\n", " to_address as Participant,\n",
" sum(raw_amount/pow(10,18)) as xMETRIC\n", " sum(raw_amount/pow(10,18)) as xMETRIC\n",
" FROM polygon.core.fact_token_transfers\n", " FROM polygon.core.fact_token_transfers\n",
" WHERE\n", " WHERE\n",
" block_timestamp::date > '2022-07-10'::date \n", " block_timestamp::date > '2022-07-10'::date \n",
" AND contract_address = lower('0x15848C9672e99be386807b9101f83A16EB017bb5')\n", " AND contract_address = lower('0x15848C9672e99be386807b9101f83A16EB017bb5')\n",
" AND to_address != lower('0x4b8923746a1D9943bbd408F477572762801efE4d')\n", " AND to_address != lower('0x4b8923746a1D9943bbd408F477572762801efE4d')\n",
" GROUP BY 1\n", " GROUP BY 1\n",
"),\n", "),\n",
"burnt_tokens AS (\n", "burnt_tokens AS (\n",
" SELECT\n", " SELECT\n",
" to_address as Participant,\n", " to_address as Participant,\n",
" sum(raw_amount/pow(10,18)) as xMETRIC\n", " sum(raw_amount/pow(10,18)) as xMETRIC\n",
" FROM polygon.core.fact_token_transfers\n", " FROM polygon.core.fact_token_transfers\n",
" WHERE\n", " WHERE\n",
" block_timestamp::date > '2022-07-10'::date \n", " block_timestamp::date > '2022-07-10'::date \n",
" AND contract_address = lower('0x15848C9672e99be386807b9101f83A16EB017bb5')\n", " AND contract_address = lower('0x15848C9672e99be386807b9101f83A16EB017bb5')\n",
" AND to_address = lower('0x0000000000000000000000000000000000000000')\n", " AND to_address = lower('0x0000000000000000000000000000000000000000')\n",
" GROUP BY 1\n", " GROUP BY 1\n",
")\n", ")\n",
"SELECT\n", "SELECT\n",
" sent_tokens.Participant as \"participant_addr\",\n", " sent_tokens.Participant as \"participant_addr\",\n",
" coalesce(sent_tokens.xmetric,0) - coalesce(burnt_tokens.xMETRIC,0) as \"balance\"\n", " coalesce(sent_tokens.xmetric,0) - coalesce(burnt_tokens.xMETRIC,0) as \"balance\"\n",
"FROM sent_tokens \n", "FROM sent_tokens \n",
"LEFT JOIN burnt_tokens ON sent_tokens.Participant = burnt_tokens.Participant\n", "LEFT JOIN burnt_tokens ON sent_tokens.Participant = burnt_tokens.Participant\n",
"ORDER BY 2 DESC" "ORDER BY 2 DESC"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"# Load the sql query from a file\n", "# Load the sql query from a file\n",
"leaderboard_sql_query = open(\"./sql/xmetric_leaderboard.sql\", 'r').read()\n", "leaderboard_sql_query = open(\"./sql/xmetric_leaderboard.sql\", 'r').read()\n",
"\n", "\n",
"# Run the query with pagination\n", "# Run the query with pagination\n",
"\n", "\n",
"page_number = 1\n", "page_number = 1\n",
"page_size = 10\n", "page_size = 10\n",
"\n", "\n",
"leaderboard_result_set = sdk.query(\n", "leaderboard_result_set = sdk.query(\n",
" leaderboard_sql_query, \n", " leaderboard_sql_query, \n",
" page_size=page_size,\n", " page_size=page_size,\n",
" page_number=page_number)\n", " page_number=page_number)\n",
"\n", "\n",
"for record in leaderboard_result_set.records:\n", "for record in leaderboard_result_set.records:\n",
" print(record)" " print(record)"
] ]
}, },
{ {
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"### Plot the xMETRIC LeaderBoard Results" "### Plot the xMETRIC LeaderBoard Results"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"full_leaderboard_result_set = sdk.query(leaderboard_sql_query)\n", "full_leaderboard_result_set = sdk.query(leaderboard_sql_query)\n",
"\n", "\n",
"import pandas as pd\n", "import pandas as pd\n",
"import plotly.express as px\n", "import plotly.express as px\n",
"\n", "\n",
"df = pd.DataFrame(full_leaderboard_result_set.records)\n", "df = pd.DataFrame(full_leaderboard_result_set.records)\n",
"\n", "\n",
"fig = px.histogram(df, x=\"balance\", marginal=\"box\", hover_data=df.columns, nbins=200)\n", "fig = px.histogram(df, x=\"balance\", marginal=\"box\", hover_data=df.columns, nbins=200)\n",
"\n", "\n",
"fig.show()" "fig.show()"
] ]
}, },
{ {
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"### Cross Chain xMETRIC User Exploration" "### Cross Chain xMETRIC User Exploration"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": { "metadata": {
"vscode": { "vscode": {
"languageId": "sql" "languageId": "sql"
} }
}, },
"outputs": [], "outputs": [],
"source": [ "source": [
"WITH xmetric_holders AS (\n", "WITH xmetric_holders AS (\n",
" SELECT to_address as holder_addr\n", " SELECT to_address as holder_addr\n",
" FROM polygon.core.fact_token_transfers\n", " FROM polygon.core.fact_token_transfers\n",
" WHERE block_timestamp > '2022-07-10T00:00:00'\n", " WHERE block_timestamp > '2022-07-10T00:00:00'\n",
" AND contract_address = lower('0x15848C9672e99be386807b9101f83A16EB017bb5')\n", " AND contract_address = lower('0x15848C9672e99be386807b9101f83A16EB017bb5')\n",
" AND to_address != lower('0x4b8923746a1D9943bbd408F477572762801efE4d')\n", " AND to_address != lower('0x4b8923746a1D9943bbd408F477572762801efE4d')\n",
")\n", ")\n",
"SELECT\n", "SELECT\n",
" token_name,\n", " token_name,\n",
" symbol,\n", " symbol,\n",
" count(distinct user_address) as num_holders,\n", " count(distinct user_address) as num_holders,\n",
" median(usd_value_now) as median_usd_holdings\n", " median(usd_value_now) as median_usd_holdings\n",
"FROM ethereum.core.ez_current_balances\n", "FROM ethereum.core.ez_current_balances\n",
"INNER JOIN xmetric_holders \n", "INNER JOIN xmetric_holders \n",
" ON ethereum.core.ez_current_balances.user_address = xmetric_holders.holder_addr\n", " ON ethereum.core.ez_current_balances.user_address = xmetric_holders.holder_addr\n",
"WHERE ethereum.core.ez_current_balances.usd_value_now > 0\n", "WHERE ethereum.core.ez_current_balances.usd_value_now > 0\n",
"GROUP BY 1, 2\n", "GROUP BY 1, 2\n",
"ORDER BY 3 DESC\n", "ORDER BY 3 DESC\n",
"LIMIT 25" "LIMIT 25"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"# Load the sql query from a file\n", "# Load the sql query from a file\n",
"xmetric_eth_holdings_sql_query = open(\"./sql/xmetric_eth_holdings.sql\", 'r').read()\n", "xmetric_eth_holdings_sql_query = open(\"./sql/xmetric_eth_holdings.sql\", 'r').read()\n",
"\n", "\n",
"# Run the query\n", "# Run the query\n",
"xmetric_eth_holdings_results = sdk.query(xmetric_eth_holdings_sql_query)\n" "xmetric_eth_holdings_results = sdk.query(xmetric_eth_holdings_sql_query)\n"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"# Plot the results\n", "# Plot the results\n",
"df = pd.DataFrame(xmetric_eth_holdings_results.records)\n", "df = pd.DataFrame(xmetric_eth_holdings_results.records)\n",
"\n", "\n",
"fig = px.bar(df, x=\"token_name\", y=\"num_holders\", hover_data=df.columns)\n", "fig = px.bar(df, x=\"token_name\", y=\"num_holders\", hover_data=df.columns)\n",
"\n", "\n",
"fig.show()" "fig.show()"
] ]
} }
], ],
"metadata": { "metadata": {
"kernelspec": { "kernelspec": {
"display_name": "Python 3.10.1 64-bit", "display_name": "Python 3.10.1 64-bit",
"language": "python", "language": "python",
"name": "python3" "name": "python3"
}, },
"language_info": { "language_info": {
"codemirror_mode": { "codemirror_mode": {
"name": "ipython", "name": "ipython",
"version": 3 "version": 3
}, },
"file_extension": ".py", "file_extension": ".py",
"mimetype": "text/x-python", "mimetype": "text/x-python",
"name": "python", "name": "python",
"nbconvert_exporter": "python", "nbconvert_exporter": "python",
"pygments_lexer": "ipython3", "pygments_lexer": "ipython3",
"version": "3.10.1" "version": "3.10.1"
}, },
"orig_nbformat": 4, "orig_nbformat": 4,
"vscode": { "vscode": {
"interpreter": { "interpreter": {
"hash": "949777d72b0d2535278d3dc13498b2535136f6dfe0678499012e853ee9abcab1" "hash": "949777d72b0d2535278d3dc13498b2535136f6dfe0678499012e853ee9abcab1"
} }
} }
}, },
"nbformat": 4, "nbformat": 4,
"nbformat_minor": 2 "nbformat_minor": 2
} }

View File

@ -7,7 +7,7 @@ Programmatic access to the most comprehensive blockchain data in Web3 🥳.
![tests](https://github.com/flipsidecrypto/sdk/actions/workflows/ci_js.yml/badge.svg) ![tests](https://github.com/flipsidecrypto/sdk/actions/workflows/ci_js.yml/badge.svg)
<br> <br>
<br> <br>
You've found yourself at the Flipside Crypto JS/typescript sdk. You've found yourself at the Flipside Crypto JS/typescript SDK.
<br> <br>
<br> <br>
@ -23,6 +23,10 @@ or if using npm
npm install @flipsidecrypto/sdk npm install @flipsidecrypto/sdk
``` ```
## 🗝 Generate an API Key for Free
Get your [free API key here](https://flipsidecrypto.xyz/api-keys)
## 🦾 Getting Started ## 🦾 Getting Started
```typescript ```typescript
@ -39,7 +43,7 @@ const myAddress = "0x....";
// Create a query object for the `query.run` function to execute // Create a query object for the `query.run` function to execute
const query: Query = { const query: Query = {
sql: `select nft_address, mint_price_eth, mint_price_usd from flipside_prod_db.ethereum_core.ez_nft_mints where nft_to_address = LOWER('${myAddress}')`, sql: `select nft_address, mint_price_eth, mint_price_usd from ethereum.nft.ez_nft_mints where nft_to_address = LOWER('${myAddress}')`,
maxAgeMinutes: 30, maxAgeMinutes: 30,
}; };
@ -48,10 +52,10 @@ const result: QueryResultSet = await flipside.query.run(query);
// Iterate over the results // Iterate over the results
result.records.map((record) => { result.records.map((record) => {
const nftAddress = record.nft_address const nftAddress = record.nft_address;
const mintPriceEth = record.mint_price_eth const mintPriceEth = record.mint_price_eth;
const mintPriceUSD = = record.mint_price_usd const mintPriceUSD = record.mint_price_usd;
console.log(`address ${nftAddress} minted at a price of ${mintPrice} ETH or $${mintPriceUSD} USD`); console.log(`address ${nftAddress} minted at a price of ${mintPriceEth} ETH or $${mintPriceUSD} USD`);
}); });
``` ```
@ -99,7 +103,7 @@ Let's create a query to retrieve all NFTs minted by an address:
const yourAddress = "<your_ethereum_address>"; const yourAddress = "<your_ethereum_address>";
const query: Query = { const query: Query = {
sql: `select nft_address, mint_price_eth, mint_price_usd from flipside_prod_db.ethereum_core.ez_nft_mints where nft_to_address = LOWER('${myAddress}')`, sql: `select nft_address, mint_price_eth, mint_price_usd from ethereum.nft.ez_nft_mints where nft_to_address = LOWER('${yourAddress}')`,
maxAgeMinutes: 5, maxAgeMinutes: 5,
cached: true, cached: true,
timeoutMinutes: 15, timeoutMinutes: 15,
@ -298,7 +302,7 @@ Set `maxAgeMinutes` to 30:
```typescript ```typescript
const query: Query = { const query: Query = {
sql: `select nft_address, mint_price_eth, mint_price_usd from flipside_prod_db.ethereum_core.ez_nft_mints where nft_to_address = LOWER('${myAddress}')`, sql: `select nft_address, mint_price_eth, mint_price_usd from ethereum.nft.ez_nft_mints where nft_to_address = LOWER('${myAddress}')`,
maxAgeMinutes: 30 maxAgeMinutes: 30
}; };
``` ```
@ -309,13 +313,13 @@ If you would like to force a cache bust and re-execute the query. You have two o
```typescript ```typescript
const query: Query = { const query: Query = {
sql: `select nft_address, mint_price_eth, mint_price_usd from flipside_prod_db.ethereum_core.ez_nft_mints where nft_to_address = LOWER('${myAddress}')`, sql: `select nft_address, mint_price_eth, mint_price_usd from ethereum.nft.ez_nft_mints where nft_to_address = LOWER('${myAddress}')`,
maxAgeMinutes: 0 maxAgeMinutes: 0
}; };
// or: // or:
const query: Query = { const query: Query = {
sql: `select nft_address, mint_price_eth, mint_price_usd from flipside_prod_db.ethereum_core.ez_nft_mints where nft_to_address = LOWER('${myAddress}')`, sql: `select nft_address, mint_price_eth, mint_price_usd from ethereum.nft.ez_nft_mints where nft_to_address = LOWER('${myAddress}')`,
maxAgeMinutes: 30, maxAgeMinutes: 30,
cache: false cache: false
}; };
@ -358,4 +362,4 @@ Flipside does NOT charge for the number of bytes/records returned.
### Client Side Request Requirements ### Client Side Request Requirements
All API Keys correspond to a list of hostnames. Client-side requests that do not originate from the corresponding hostname will fail. You may configure hostnames [here](https://flipsidecrypto.xyz/account/api-keys). All API Keys correspond to a list of hostnames. Client-side requests that do not originate from the corresponding hostname will fail. You may configure hostnames [here](https://flipsidecrypto.xyz/api-keys).

View File

@ -1 +1 @@
2.0.8 2.1.0

python/log.txt Normal file
View File

View File

@ -1,2 +1,3 @@
pytest==6.2.4 pytest==6.2.4
freezegun==1.1.0 freezegun==1.1.0
requests-mock==1.11.0

View File

@ -1,2 +1,2 @@
pydantic==1.10.9 pydantic==2.10.0
requests==2.29.0 requests==2.32.0

View File

@ -32,11 +32,10 @@ setup(
"Intended Audience :: Developers", # Define that your audience are developers "Intended Audience :: Developers", # Define that your audience are developers
"License :: OSI Approved :: MIT License", # Again, pick a license "License :: OSI Approved :: MIT License", # Again, pick a license
"Operating System :: OS Independent", "Operating System :: OS Independent",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.10",
], ],
dependency_links=[], dependency_links=[],
python_requires=">=3.7", python_requires=">=3.8",
) )

View File

@ -11,7 +11,7 @@ from .rpc import RPC
API_BASE_URL = "https://api-v2.flipsidecrypto.xyz" API_BASE_URL = "https://api-v2.flipsidecrypto.xyz"
SDK_VERSION = "2.0.4" SDK_VERSION = "2.1.0"
SDK_PACKAGE = "python" SDK_PACKAGE = "python"
DEFAULT_DATA_SOURCE = "snowflake-default" DEFAULT_DATA_SOURCE = "snowflake-default"

View File

@ -39,21 +39,22 @@ class CompassQueryIntegration(object):
def run(self, query: Query) -> QueryResultSet: def run(self, query: Query) -> QueryResultSet:
query = self._set_query_defaults(query) query = self._set_query_defaults(query)
# Use the default values from Query class when None
ttl_hours = int((query.ttl_minutes or 0) / 60)
max_age_minutes = query.max_age_minutes or 5 # default from Query class
retry_interval_seconds = query.retry_interval_seconds or 1 # default from Query class
create_query_run_params = CreateQueryRunRpcParams( create_query_run_params = CreateQueryRunRpcParams(
resultTTLHours=int(query.ttl_minutes / 60) resultTTLHours=ttl_hours,
if query.ttl_minutes sql=query.sql or "",
else DEFAULTS.ttl_minutes, maxAgeMinutes=max_age_minutes,
sql=query.sql,
maxAgeMinutes=query.max_age_minutes
if query.max_age_minutes
else DEFAULTS.max_age_minutes,
tags=Tags( tags=Tags(
sdk_language="python", sdk_language="python",
sdk_package=query.sdk_package, sdk_package=query.sdk_package,
sdk_version=query.sdk_version, sdk_version=query.sdk_version,
), ),
dataSource=query.data_source if query.data_source else "snowflake-default", dataSource=query.data_source or "snowflake-default",
dataProvider=query.data_provider if query.data_provider else "flipside", dataProvider=query.data_provider or "flipside",
) )
created_query = self.rpc.create_query(create_query_run_params) created_query = self.rpc.create_query(create_query_run_params)
if created_query.error: if created_query.error:
@ -67,18 +68,16 @@ class CompassQueryIntegration(object):
query_run = self._get_query_run_loop( query_run = self._get_query_run_loop(
created_query.result.queryRun.id, created_query.result.queryRun.id,
page_number=query.page_number, page_number=query.page_number or 1,
page_size=query.page_size, page_size=query.page_size or 100000,
timeout_minutes=query.timeout_minutes if query.timeout_minutes else 20, timeout_minutes=query.timeout_minutes or 20,
retry_interval_seconds=query.retry_interval_seconds retry_interval_seconds=retry_interval_seconds,
if query.retry_interval_seconds
else 1,
) )
query_result = self._get_query_results( query_result = self._get_query_results(
query_run.id, query_run.id,
page_number=query.page_number if query.page_number else 1, page_number=query.page_number or 1,
page_size=query.page_size if query.page_size else 100000, page_size=query.page_size or 100000,
) )
return QueryResultSetBuilder( return QueryResultSetBuilder(
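
The refactor above replaces the verbose `x if x else default` conditionals with the equivalent `x or default` idiom. One subtlety worth knowing, shown in this small sketch: `or` substitutes the default for any falsy value, not just `None`, so a caller who explicitly passes `page_number=0` would silently get `1`.

```python
page_number = None
page_size = 0

# `or` falls through on any falsy value, not only None:
print(page_number or 1)     # 1      (None replaced, as intended)
print(page_size or 100000)  # 100000 (0 is also replaced -- a subtle edge case)
```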

View File

@ -23,4 +23,4 @@ class CancelQueryRunRpcResult(BaseModel):
class CancelQueryRunRpcResponse(RpcResponse): class CancelQueryRunRpcResponse(RpcResponse):
result: Union[CancelQueryRunRpcResult, None] result: Union[CancelQueryRunRpcResult, None] = None

View File

@ -11,23 +11,23 @@ class QueryRun(BaseModel):
sqlStatementId: str sqlStatementId: str
state: str state: str
path: str path: str
fileCount: Optional[int] fileCount: Optional[int] = None
lastFileNumber: Optional[int] lastFileNumber: Optional[int] = None
fileNames: Optional[str] fileNames: Optional[str] = None
errorName: Optional[str] errorName: Optional[str] = None
errorMessage: Optional[str] errorMessage: Optional[str] = None
errorData: Optional[Any] errorData: Optional[Any] = None
dataSourceQueryId: Optional[str] dataSourceQueryId: Optional[str] = None
dataSourceSessionId: Optional[str] dataSourceSessionId: Optional[str] = None
startedAt: Optional[str] startedAt: Optional[str] = None
queryRunningEndedAt: Optional[str] queryRunningEndedAt: Optional[str] = None
queryStreamingEndedAt: Optional[str] queryStreamingEndedAt: Optional[str] = None
endedAt: Optional[str] endedAt: Optional[str] = None
rowCount: Optional[int] rowCount: Optional[int] = None
totalSize: Optional[int] totalSize: Optional[int] = None
tags: Tags tags: Tags
dataSourceId: str dataSourceId: str
userId: str userId: str
createdAt: str createdAt: str
updatedAt: datetime updatedAt: datetime
archivedAt: Optional[datetime] archivedAt: Optional[datetime] = None

View File

@ -6,4 +6,4 @@ from pydantic import BaseModel
class RpcError(BaseModel): class RpcError(BaseModel):
code: int code: int
message: str message: str
data: Optional[Any] data: Optional[Any] = None

View File

@ -8,5 +8,5 @@ from .rpc_error import RpcError
class RpcResponse(BaseModel): class RpcResponse(BaseModel):
jsonrpc: str jsonrpc: str
id: int id: int
result: Union[Optional[Dict[str, Any]], None] result: Union[Optional[Dict[str, Any]], None] = None
error: Optional[RpcError] error: Optional[RpcError] = None

View File

@ -10,7 +10,7 @@ class SqlStatement(BaseModel):
id: str id: str
statementHash: str statementHash: str
sql: str sql: str
columnMetadata: Optional[ColumnMetadata] columnMetadata: Optional[ColumnMetadata] = None
userId: str userId: str
tags: Tags tags: Tags
createdAt: str createdAt: str

View File

@ -5,6 +5,6 @@ from pydantic import BaseModel
class Tags(BaseModel): class Tags(BaseModel):
sdk_package: Optional[str] sdk_package: Optional[str] = None
sdk_version: Optional[str] sdk_version: Optional[str] = None
sdk_language: Optional[str] sdk_language: Optional[str] = None

View File

@ -33,4 +33,4 @@ class CreateQueryRunRpcResult(BaseModel):
class CreateQueryRunRpcResponse(RpcResponse): class CreateQueryRunRpcResponse(RpcResponse):
result: Union[CreateQueryRunRpcResult, None] result: Union[CreateQueryRunRpcResult, None] = None

View File

@ -21,8 +21,8 @@ class GetQueryRunRpcRequest(RpcRequest):
# Response # Response
class GetQueryRunRpcResult(BaseModel): class GetQueryRunRpcResult(BaseModel):
queryRun: QueryRun queryRun: QueryRun
redirectedToQueryRun: Optional[QueryRun] redirectedToQueryRun: Optional[QueryRun] = None
class GetQueryRunRpcResponse(RpcResponse): class GetQueryRunRpcResponse(RpcResponse):
result: Union[GetQueryRunRpcResult, None] result: Union[GetQueryRunRpcResult, None] = None

View File

@ -1,6 +1,6 @@
from typing import Any, Dict, List, Optional, Union from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel from pydantic import ConfigDict, BaseModel
from .core.page import Page from .core.page import Page
from .core.page_stats import PageStats from .core.page_stats import PageStats
@ -22,9 +22,13 @@ class Filter(BaseModel):
like: Optional[Any] = None like: Optional[Any] = None
in_: Optional[List[Any]] = None in_: Optional[List[Any]] = None
notIn: Optional[List[Any]] = None notIn: Optional[List[Any]] = None
# TODO[pydantic]: The following keys were removed: `fields`.
class Config: # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
fields = {"in_": "in"} model_config = ConfigDict(
alias_generator=None,
populate_by_name=True,
json_schema_extra={"fields": {"in_": "in"}}
)
def dict(self, *args, **kwargs) -> dict: def dict(self, *args, **kwargs) -> dict:
kwargs.setdefault("exclude_none", True) # Exclude keys with None values kwargs.setdefault("exclude_none", True) # Exclude keys with None values
@ -62,15 +66,15 @@ class GetQueryRunResultsRpcRequest(RpcRequest):
# Response # Response
class GetQueryRunResultsRpcResult(BaseModel): class GetQueryRunResultsRpcResult(BaseModel):
columnNames: Union[Optional[List[str]], None] columnNames: Union[Optional[List[str]], None] = None
columnTypes: Union[Optional[List[str]], None] columnTypes: Union[Optional[List[str]], None] = None
rows: Union[List[Any], None] rows: Union[List[Any], None] = None
page: Union[PageStats, None] page: Union[PageStats, None] = None
sql: Union[str, None] sql: Union[str, None] = None
format: Union[ResultFormat, None] format: Union[ResultFormat, None] = None
originalQueryRun: QueryRun originalQueryRun: QueryRun
redirectedToQueryRun: Union[QueryRun, None] redirectedToQueryRun: Union[QueryRun, None] = None
class GetQueryRunResultsRpcResponse(RpcResponse): class GetQueryRunResultsRpcResponse(RpcResponse):
result: Union[GetQueryRunResultsRpcResult, None] result: Union[GetQueryRunResultsRpcResult, None] = None

View File

@ -23,4 +23,4 @@ class GetSqlStatemetnResult(BaseModel):
class GetSqlStatementResponse(RpcResponse): class GetSqlStatementResponse(RpcResponse):
result: Union[GetSqlStatemetnResult, None] result: Union[GetSqlStatemetnResult, None] = None

View File

@ -4,7 +4,7 @@ from pydantic import BaseModel, Field
class Query(BaseModel): class Query(BaseModel):
sql: str = Field(None, description="SQL query to execute") sql: Optional[str] = Field(None, description="SQL query to execute")
ttl_minutes: Optional[int] = Field( ttl_minutes: Optional[int] = Field(
None, description="The number of minutes to cache the query results" None, description="The number of minutes to cache the query results"
) )
@ -21,8 +21,8 @@ class Query(BaseModel):
None, None,
description="An override on the cache. A value of true will Re-Execute the query.", description="An override on the cache. A value of true will Re-Execute the query.",
) )
page_size: int = Field(None, description="The number of results to return per page") page_size: Optional[int] = Field(None, description="The number of results to return per page")
page_number: int = Field(None, description="The page number to return") page_number: Optional[int] = Field(None, description="The page number to return")
sdk_package: Optional[str] = Field( sdk_package: Optional[str] = Field(
None, description="The SDK package used for the query" None, description="The SDK package used for the query"
) )
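
With `sql`, `page_size`, and `page_number` now `Optional`, a `Query` can be built from the SQL text alone, and the integration code shown earlier fills in paging defaults at run time. A quick usage sketch (the import path is assumed from the repository layout):

```python
from flipside.models import Query  # assumed import path

q = Query(sql="select 1")
print(q.page_size, q.page_number)  # None None -- defaulted to 100000/1 by run()
```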

View File

@ -1,20 +1,21 @@
from typing import Optional
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
class QueryDefaults(BaseModel): class QueryDefaults(BaseModel):
ttl_minutes: int = Field( ttl_minutes: Optional[int] = Field(
None, description="The number of minutes to cache the query results" None, description="The number of minutes to cache the query results"
) )
max_age_minutes: int = Field( max_age_minutes: Optional[int] = Field(
None, None,
description="The max age of query results to accept before deciding to run a query again", description="The max age of query results to accept before deciding to run a query again",
) )
cached: bool = Field(False, description="Whether or not to cache the query results") cached: bool = Field(False, description="Whether or not to cache the query results")
timeout_minutes: int = Field( timeout_minutes: Optional[int] = Field(
None, description="The number of minutes to timeout the query" None, description="The number of minutes to timeout the query"
) )
retry_interval_seconds: float = Field( retry_interval_seconds: Optional[float] = Field(
None, description="The number of seconds to wait before retrying the query" None, description="The number of seconds to wait before retrying the query"
) )
page_size: int = Field(None, description="The number of results to return per page") page_size: Optional[int] = Field(None, description="The number of results to return per page")
page_number: int = Field(None, description="The page number to return") page_number: Optional[int] = Field(None, description="The page number to return")

View File

@ -10,7 +10,7 @@ class QueryResultSet(BaseModel):
query_id: Union[str, None] = Field(None, description="The server id of the query") query_id: Union[str, None] = Field(None, description="The server id of the query")
status: str = Field( status: str = Field(
False, description="The status of the query (`PENDING`, `FINISHED`, `ERROR`)" "PENDING", description="The status of the query (`PENDING`, `FINISHED`, `ERROR`)"
) )
columns: Union[List[str], None] = Field( columns: Union[List[str], None] = Field(
None, description="The names of the columns in the result set" None, description="The names of the columns in the result set"
@ -29,4 +29,4 @@ class QueryResultSet(BaseModel):
page: Union[PageStats, None] = Field( page: Union[PageStats, None] = Field(
None, description="Summary of page stats for this query result set" None, description="Summary of page stats for this query result set"
) )
error: Any error: Any = None

View File

@ -1,40 +1,41 @@
from datetime import datetime from datetime import datetime
from typing import Optional
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
class QueryRunStats(BaseModel): class QueryRunStats(BaseModel):
started_at: datetime = Field(None, description="The start time of the query run.") started_at: Optional[datetime] = Field(None, description="The start time of the query run.")
ended_at: datetime = Field(None, description="The end time of the query run.") ended_at: Optional[datetime] = Field(None, description="The end time of the query run.")
query_exec_started_at: datetime = Field( query_exec_started_at: Optional[datetime] = Field(
None, description="The start time of query execution." None, description="The start time of query execution."
) )
query_exec_ended_at: datetime = Field( query_exec_ended_at: Optional[datetime] = Field(
None, description="The end time of query execution." None, description="The end time of query execution."
) )
streaming_started_at: datetime = Field( streaming_started_at: Optional[datetime] = Field(
None, description="The start time of streaming query results." None, description="The start time of streaming query results."
) )
streaming_ended_at: datetime = Field( streaming_ended_at: Optional[datetime] = Field(
None, description="The end time of streaming query results." None, description="The end time of streaming query results."
) )
elapsed_seconds: int = Field( elapsed_seconds: Optional[int] = Field(
None, None,
description="The number of seconds elapsed between the start and end times.", description="The number of seconds elapsed between the start and end times.",
) )
queued_seconds: int = Field( queued_seconds: Optional[int] = Field(
None, None,
description="The number of seconds elapsed between when the query was created and when execution on the data source began.", description="The number of seconds elapsed between when the query was created and when execution on the data source began.",
) )
streaming_seconds: int = Field( streaming_seconds: Optional[int] = Field(
None, None,
description="The number of seconds elapsed between when the query execution completed and results were fully streamed to Flipside's servers.", description="The number of seconds elapsed between when the query execution completed and results were fully streamed to Flipside's servers.",
) )
query_exec_seconds: int = Field( query_exec_seconds: Optional[int] = Field(
None, None,
description="The number of seconds elapsed between when the query execution started and when it completed on the data source.", description="The number of seconds elapsed between when the query execution started and when it completed on the data source.",
) )
record_count: int = Field( record_count: Optional[int] = Field(
None, description="The number of records returned by the query." None, description="The number of records returned by the query."
) )
bytes: int = Field(None, description="The number of bytes returned by the query.") bytes: Optional[int] = Field(None, description="The number of bytes returned by the query.")
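
Since every stat on `QueryRunStats` is now `Optional`, downstream code should guard against `None` before doing arithmetic on timestamps or counts. A sketch under that assumption (import path assumed, mirroring the diff):

```python
from flipside.models import QueryRunStats  # assumed import path

stats = QueryRunStats()  # now valid even with no data populated
if stats.started_at is not None and stats.ended_at is not None:
    print((stats.ended_at - stats.started_at).total_seconds())
else:
    print("run stats not populated yet")
```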

View File

@ -6,4 +6,4 @@ from pydantic import BaseModel
class SleepConfig(BaseModel): class SleepConfig(BaseModel):
attempts: int attempts: int
timeout_minutes: Union[int, float] timeout_minutes: Union[int, float]
interval_seconds: Optional[float] interval_seconds: Optional[float] = None

View File

@ -1,4 +1,6 @@
import json import json
import pytest
import requests_mock
from ....errors import ( from ....errors import (
ApiError, ApiError,
@ -20,6 +22,12 @@ from ...utils.mock_data.get_sql_statement import get_sql_statement_response
SDK_VERSION = "1.0.2" SDK_VERSION = "1.0.2"
SDK_PACKAGE = "python" SDK_PACKAGE = "python"
# Add the fixture decorator
@pytest.fixture(autouse=True)
def requests_mock_fixture():
with requests_mock.Mocker() as m:
yield m
def get_rpc(): def get_rpc():
return RPC("https://test.com", "api_key") return RPC("https://test.com", "api_key")
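
The autouse fixture above hands every test a `requests_mock.Mocker`, so HTTP calls made through `requests` are intercepted for the duration of the test. The same pattern, shown inline with a hypothetical endpoint and payload:

```python
import requests
import requests_mock

with requests_mock.Mocker() as m:
    # Register a canned JSON-RPC response for a hypothetical endpoint.
    m.post("https://test.com/rpc", json={"jsonrpc": "2.0", "id": 1, "result": {}})
    resp = requests.post("https://test.com/rpc", json={"method": "createQueryRun"})
    assert resp.json()["result"] == {}
```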

View File

@ -1,4 +1,6 @@
import json import json
import pytest
import requests_mock
from ..errors.server_error import ServerError from ..errors.server_error import ServerError
from ..models import Query, QueryStatus from ..models import Query, QueryStatus
@ -14,6 +16,11 @@ from .utils.mock_data.create_query_run import create_query_run_response
from .utils.mock_data.get_query_results import get_query_results_response from .utils.mock_data.get_query_results import get_query_results_response
from .utils.mock_data.get_query_run import get_query_run_response from .utils.mock_data.get_query_run import get_query_run_response
@pytest.fixture(autouse=True)
def requests_mock_fixture():
with requests_mock.Mocker() as m:
yield m
""" """
Test Defaults Test Defaults
""" """