mirror of https://github.com/FlipsideCrypto/sdk.git (synced 2026-02-06 02:36:43 +00:00)

Format python code.

This commit is contained in:
parent cdf874ccae
commit e9e3eccde4
.flake8 (new file, 3 lines)
@@ -0,0 +1,3 @@
# Autoformatter friendly flake8 config (all formatting rules disabled)
[flake8]
extend-ignore = D1, D2, E1, E2, E3, E501, W1, W2, W3, W5
.isort.cfg (new file, 2 lines)
@@ -0,0 +1,2 @@
[settings]
profile=black
.markdownlint.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
# Autoformatter friendly markdownlint config (all formatting rules disabled)
default: true
blank_lines: false
bullet: false
html: false
indentation: false
line_length: false
spaces: false
url: false
whitespace: false
.trunk/.gitignore (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
*out
*logs
external
.trunk/trunk.yaml (new file, 13 lines)
@@ -0,0 +1,13 @@
version: 0.1
cli:
  version: 0.15.0-beta
lint:
  enabled:
    - actionlint@1.6.15
    - black@22.6.0
    - flake8@4.0.1
    - git-diff-check@SYSTEM
    - gitleaks@8.8.12
    - isort@5.10.1
    - markdownlint@0.32.0
    - prettier@2.7.1
.vscode/settings.json (vendored, 2 lines changed)
@@ -1 +1 @@
{ "python.analysis.typeCheckingMode": "basic" }
{ "python.analysis.typeCheckingMode": "basic", "python.formatting.provider": "black"}
@@ -1,2 +1,5 @@
# trunk-ignore(flake8/F401)
from .api import API

# trunk-ignore(flake8/F401)
from .shroomdk import ShroomDK
from .api import API
@@ -1,35 +1,53 @@
import requests
import json
from typing import List

from .models import (
Query
)
import requests
from requests.adapters import HTTPAdapter, Retry

from .models import Query
from .models.api import (
CreateQueryResp,
CreateQueryJson,
CreateQueryResp,
QueryResultJson,
QueryResultResp,
QueryResultJson
)


class API(object):
def __init__(
self,
base_url: str,
api_key: str,
max_retries: int = 10,
backoff_factor: float = 1,
status_forcelist: List[int] = [429, 500, 502, 503, 504],
method_allowlist: List[str] = [
"HEAD",
"GET",
"PUT",
"POST",
"DELETE",
"OPTIONS",
"TRACE",
],
):
self._base_url = base_url
self._api_key = api_key

def __init__(self, base_url: str, api_key: str):
self.base_url = base_url
self.headers = {
"Accept": "application/json",
"Content-Type": "application/json",
"x-api-key": api_key,
}
# Session Settings
self._MAX_RETRIES = max_retries
self._BACKOFF_FACTOR = backoff_factor
self._STATUS_FORCE_LIST = status_forcelist
self._METHOD_ALLOWLIST = method_allowlist

def get_url(self, path: str) -> str:
return f"{self.base_url}/{path}"

return f"{self._base_url}/{path}"

def create_query(self, query: Query) -> CreateQueryResp:
result = requests.post(
result = self._session.post(
self.get_url("queries"),
data=json.dumps(query.dict()),
headers=self.headers,
headers=self._headers,
)

try:
@@ -40,15 +58,19 @@ class API(object):
return CreateQueryResp(
status_code=result.status_code,
status_msg=result.reason,
error_msg=data.get('errors') if data else None,
data=CreateQueryJson(**data) if data and data.get('errors') is None else None,
error_msg=data.get("errors") if data else None,
data=CreateQueryJson(**data)
if data and data.get("errors") is None
else None,
)

def get_query_result(self, query_id: str, page_number: int, page_size: int) -> QueryResultResp:
result = requests.get(
def get_query_result(
self, query_id: str, page_number: int, page_size: int
) -> QueryResultResp:
result = self._session.get(
self.get_url(f"queries/{query_id}"),
params={"pageNumber": page_number, "pageSize": page_size},
headers=self.headers,
headers=self._headers,
)

try:
@@ -59,6 +81,36 @@ class API(object):
return QueryResultResp(
status_code=result.status_code,
status_msg=result.reason,
error_msg=data.get('errors') if data else None,
data=QueryResultJson(**data) if data and data.get('errors') is None else None,
error_msg=data.get("errors") if data else None,
data=QueryResultJson(**data)
if data and data.get("errors") is None
else None,
)

@property
def _headers(self) -> dict:
return {
"Accept": "application/json",
"Content-Type": "application/json",
"x-api-key": self._api_key,
}

@property
def _session(self) -> requests.Session:
if hasattr(self, "__session"):
return self._session

retry_strategy = Retry(
total=self._MAX_RETRIES,
backoff_factor=self._BACKOFF_FACTOR,
status_forcelist=self._STATUS_FORCE_LIST,
allowed_methods=self._METHOD_ALLOWLIST,
)

adapter = HTTPAdapter(max_retries=retry_strategy)
http = requests.Session()
http.mount("https://", adapter)
http.mount("http://", adapter)

self.__session = http
return self.__session
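The reworked constructor above exposes the retry knobs that feed the new `_session` property. A minimal usage sketch (not part of the commit; the API key is a placeholder and the retry values are illustrative overrides):

# Illustrative sketch, not part of the commit: constructing the reworked API
# client with the retry parameters introduced above.
from shroomdk.api import API
from shroomdk.models import Query

api = API(
    base_url="https://api.flipsidecrypto.com",
    api_key="YOUR_API_KEY",  # placeholder
    max_retries=5,           # illustrative override of the default of 10 shown above
    backoff_factor=0.5,
)
resp = api.create_query(Query(sql="SELECT 1", ttl_minutes=5, page_size=1, page_number=1))
print(resp.status_code, resp.error_msg)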
@@ -1,8 +1,15 @@
# trunk-ignore(flake8/F401)
from .query_run_errors import (
QueryRunExecutionError,
QueryRunRateLimitError,
QueryRunTimeoutError,
QueryRunExecutionError
)

# trunk-ignore(flake8/F401)
from .sdk_error import SDKError

# trunk-ignore(flake8/F401)
from .server_error import ServerError

# trunk-ignore(flake8/F401)
from .user_error import UserError
@@ -1,7 +1,6 @@
class BaseError(Exception):
"""
Base error class for all errors in the flipsidecrypto package.
"""
pass

pass
@@ -1,4 +1,5 @@
from typing import Union

from .base_error import BaseError

@@ -6,7 +7,7 @@ class QueryRunRateLimitError(BaseError):
"""
Base class for all QueryRunRateLimitError errors.
"""

def __init__(self):
self.message = "QUERY_RUN_RATE_LIMIT_ERROR: you have exceeded the rate limit for creating/running new queries"
super().__init__(self.message)
@@ -16,7 +17,7 @@ class QueryRunTimeoutError(BaseError):
"""
Base class for all QueryRunTimeoutError errors.
"""

def __init__(self, timeoutMinutes: Union[int, float]):
self.message = f"QUERY_RUN_TIMEOUT_ERROR: your query has timed out after {timeoutMinutes} minutes."
super().__init__(self.message)
@@ -26,7 +27,7 @@ class QueryRunExecutionError(BaseError):
"""
Base class for all QueryRunExecutionError errors.
"""

def __init__(self):
self.message = "QUERY_RUN_EXECUTION_ERROR: an error has occured while executing your query."
super().__init__(self.message)
@@ -1,3 +1,4 @@
from typing import Union

from .base_error import BaseError

@@ -6,7 +7,7 @@ class SDKError(BaseError):
"""
Base class for all SDK errors.
"""

def __init__(self, message: str):

def __init__(self, message: Union[str, None]):
self.message = message
super().__init__(self.message)
@@ -1,3 +1,4 @@
from typing import Union

from .base_error import BaseError

@@ -6,7 +7,7 @@ class ServerError(BaseError):
"""
Base class for all server errors.
"""

def __init__(self, status_code: int, message: str):

def __init__(self, status_code: int, message: Union[str, None]):
self.message = f"unexpected server error occured with status code: {status_code}, msg: {message}"
super().__init__(self.message)
@@ -1,3 +1,4 @@
from typing import Union

from .base_error import BaseError

@@ -6,7 +7,9 @@ class UserError(BaseError):
"""
Base class for all user errors.
"""

def __init__(self, status_code: int, message: str):
self.message = f"user error occured with status code: {status_code}, msg: {message}"

def __init__(self, status_code: int, message: Union[str, None]):
self.message = (
f"user error occured with status code: {status_code}, msg: {message}"
)
super().__init__(self.message)
@@ -1 +1,2 @@
from .query_integration import QueryIntegration
# trunk-ignore(flake8/F401)
from .query_integration import QueryIntegration
@@ -1 +1,2 @@
from .query_integration import QueryIntegration, QueryDefaults
# trunk-ignore(flake8/F401)
from .query_integration import QueryDefaults, QueryIntegration
@@ -1,40 +1,36 @@
from typing import Union

from shroomdk.api import API
from shroomdk.errors import (
QueryRunExecutionError,
QueryRunTimeoutError,
SDKError,
ServerError,
UserError,
)
from shroomdk.models import (
QueryDefaults,
Query,
QueryDefaults,
QueryResultSet,
QueryStatus,
SleepConfig
)
from shroomdk.models.api import (
QueryResultJson
)
from shroomdk.errors import (
UserError,
QueryRunExecutionError,
QueryRunTimeoutError,
SDKError,
ServerError
)
from shroomdk.utils.sleep import (
get_elapsed_linear_seconds,
linear_backoff
SleepConfig,
)
from shroomdk.models.api import QueryResultJson
from shroomdk.utils.sleep import get_elapsed_linear_seconds, linear_backoff

from .query_result_set_builder import QueryResultSetBuilder


DEFAULTS: QueryDefaults = QueryDefaults(
ttl_minutes=60,
cached=True,
timeout_minutes=20,
retry_interval_seconds=0.5,
page_size=100000,
page_number=1,
ttl_minutes=60,
cached=True,
timeout_minutes=20,
retry_interval_seconds=0.5,
page_size=100000,
page_number=1,
)


class QueryIntegration(object):

def __init__(self, api: API, defaults: QueryDefaults = DEFAULTS):
self.api = api
self.defaults = defaults
@@ -49,18 +45,22 @@ class QueryIntegration(object):
elif created_query.status_code >= 500:
raise ServerError(created_query.status_code, created_query.error_msg)
else:
raise SDKError(f"unknown SDK error when calling `api.create_query`, {created_query.error_msg}")
raise SDKError(
f"unknown SDK error when calling `api.create_query`, {created_query.error_msg}"
)

query_run = created_query.data
if not query_run:
raise SDKError( "expected `created_query.data` from server but got `None`")
raise SDKError("expected `created_query.data` from server but got `None`")

query_results = self._get_query_results(
query_run.token,
page_number=query.page_number,
query_run.token,
page_number=query.page_number,
page_size=query.page_size,
timeout_minutes=query.timeout_minutes,
timeout_minutes=query.timeout_minutes if query.timeout_minutes else 20,
retry_interval_seconds=query.retry_interval_seconds
if query.retry_interval_seconds
else 1,
)

return QueryResultSetBuilder(query_results).build()
@@ -68,12 +68,20 @@ class QueryIntegration(object):
def _set_query_defaults(self, query: Query) -> Query:
query_default_dict = self.defaults.dict()
query_dict = query.dict()
query_default_dict.update({k:v for (k,v) in query_dict.items() if v is not None})
query_default_dict.update(
{k: v for (k, v) in query_dict.items() if v is not None}
)
return Query(**query_default_dict)

def _get_query_results(self, query_run_id: str, page_number: int = 1,
page_size: int = 100000, attempts: int = 0, timeout_minutes: int = 20,
retry_interval_seconds: int = 1) -> QueryResultJson:
def _get_query_results(
self,
query_run_id: str,
page_number: int = 1,
page_size: int = 100000,
attempts: int = 0,
timeout_minutes: Union[int, float] = 20,
retry_interval_seconds: Union[int, float] = 1.0,
) -> QueryResultJson:

query_run = self.api.get_query_result(query_run_id, page_number, page_size)
status_code = query_run.status_code
@@ -88,34 +96,42 @@ class QueryIntegration(object):
raise ServerError(status_code, error_msg)

if not query_run.data:
raise SDKError("valid status msg returned from server but no data exists in the response")
raise SDKError(
"valid status msg returned from server but no data exists in the response"
)

query_status = query_run.data.status

if query_status == QueryStatus.Finished:
return query_run.data

if query_status == QueryStatus.Error:
raise QueryRunExecutionError()

should_continue = linear_backoff(
SleepConfig(
attempts=attempts,
timeout_minutes=timeout_minutes,
interval_seconds=retry_interval_seconds
attempts=attempts,
timeout_minutes=timeout_minutes,
interval_seconds=retry_interval_seconds,
)
)

if not should_continue:
elapsed_seconds = get_elapsed_linear_seconds(
SleepConfig(
attempts=attempts,
timeout_minutes=timeout_minutes,
interval_seconds=retry_interval_seconds
attempts=attempts,
timeout_minutes=timeout_minutes,
interval_seconds=retry_interval_seconds,
)
)

raise QueryRunTimeoutError(elapsed_seconds)

return self._get_query_results(query_run_id, page_number, page_size, attempts + 1, timeout_minutes, retry_interval_seconds)

return self._get_query_results(
query_run_id,
page_number,
page_size,
attempts + 1,
timeout_minutes,
retry_interval_seconds,
)
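The run path above raises UserError, ServerError, SDKError, QueryRunExecutionError, or QueryRunTimeoutError depending on how a query fails. A hedged sketch of how a caller might handle them (not part of the commit; key and SQL are placeholders):

# Illustrative sketch, not part of the commit: handling the errors raised by
# QueryIntegration.run / ShroomDK.query.
from shroomdk import ShroomDK
from shroomdk.errors import (
    QueryRunExecutionError,
    QueryRunTimeoutError,
    ServerError,
    UserError,
)

sdk = ShroomDK("YOUR_API_KEY")  # placeholder key
try:
    result_set = sdk.query("SELECT 1", timeout_minutes=5)  # throwaway SQL
except QueryRunTimeoutError as err:
    print(f"query timed out: {err}")
except QueryRunExecutionError as err:
    print(f"query failed to execute: {err}")
except (UserError, ServerError) as err:
    print(f"request rejected: {err}")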
@@ -1,22 +1,18 @@
from typing import List, Any, Union
from datetime import datetime
from typing import List, Union

from shroomdk.models import QueryResultSet, QueryRunStats
from shroomdk.models.api import QueryResultJson
from shroomdk.models import (
QueryRunStats,
QueryResultSet
)


class QueryResultSetBuilder(object):

def __init__(self, data: QueryResultJson):
self.query_id = data.queryId
self.status = data.status
self.columns = data.columnLabels
self.column_types = data.columnTypes
self.rows = data.results

self.run_stats = self.compute_run_stats(data)
self.records = self.create_records(data)

@@ -29,21 +25,21 @@ class QueryResultSetBuilder(object):
rows=self.rows,
run_stats=self.run_stats,
records=self.records,
error=None
error=None,
)

def compute_run_stats(self, data: QueryResultJson) -> QueryRunStats:
if not data.startedAt or not data.endedAt:
raise Exception("Query has no data ")
start_time = datetime.strptime(data.startedAt, '%Y-%m-%dT%H:%M:%S.%fZ')
end_time = datetime.strptime(data.endedAt, '%Y-%m-%dT%H:%M:%S.%fZ')
start_time = datetime.strptime(data.startedAt, "%Y-%m-%dT%H:%M:%S.%fZ")
end_time = datetime.strptime(data.endedAt, "%Y-%m-%dT%H:%M:%S.%fZ")
return QueryRunStats(
started_at=start_time,
ended_at=end_time,
elapsed_seconds=(end_time - start_time).seconds,
record_count=len(data.results) if data.results else 0
record_count=len(data.results) if data.results else 0,
)

def create_records(self, data: QueryResultJson) -> Union[List[dict], None]:
if not data or not data.results:
return None
@@ -51,7 +47,7 @@ class QueryResultSetBuilder(object):
column_labels = data.columnLabels
if not column_labels:
return None

records: List[dict] = []
for row in data.results:
if not row:
@@ -1,6 +1,17 @@
from .query_defaults import QueryDefaults
from .query_result_set import QueryResultSet
from .query_run_stats import QueryRunStats
from .query_status import QueryStatus
# trunk-ignore(flake8/F401)
from .query import Query

# trunk-ignore(flake8/F401)
from .query_defaults import QueryDefaults

# trunk-ignore(flake8/F401)
from .query_result_set import QueryResultSet

# trunk-ignore(flake8/F401)
from .query_run_stats import QueryRunStats

# trunk-ignore(flake8/F401)
from .query_status import QueryStatus

# trunk-ignore(flake8/F401)
from .sleep_config import SleepConfig
@@ -1,3 +1,8 @@
# trunk-ignore(flake8/F401)
from .api_response import ApiResponse
from .create_query_resp import CreateQueryResp, CreateQueryJson

# trunk-ignore(flake8/F401)
from .create_query_resp import CreateQueryJson, CreateQueryResp

# trunk-ignore(flake8/F401)
from .query_result_resp import QueryResultJson, QueryResultResp
@@ -1,9 +1,12 @@
from typing import Any, Union

from pydantic import BaseModel, Field


class ApiResponse(BaseModel):
status_code: int = Field(None, description="The server-side token of the query being executed.")
status_msg: Union[str, None]
error_msg: Union[str, None]
data: Union[Any, None]
status_code: int = Field(
None, description="The server-side token of the query being executed."
)
status_msg: Union[str, None]
error_msg: Union[str, None]
data: Union[Any, None]
@@ -1,14 +1,23 @@
from typing import Optional, List, Union
from pydantic import BaseModel, Field, UUID4, PrivateAttr
from typing import Optional, Union

from pydantic import BaseModel, Field

from .api_response import ApiResponse


class CreateQueryJson(BaseModel):
token: str = Field(None, description="The server-side token of the query being executed.")
errors: Union[Optional[str], None] = Field(False, description="Error that occured when creating the query.")
cached: Optional[bool] = Field(False, description="Whether the query is cached or not.")
token: str = Field(
None, description="The server-side token of the query being executed."
)
errors: Union[Optional[str], None] = Field(
False, description="Error that occured when creating the query."
)
cached: Optional[bool] = Field(
False, description="Whether the query is cached or not."
)


class CreateQueryResp(ApiResponse):
data: Union[CreateQueryJson, None] = Field(False, description="The data payload result after attempting to create a query.")
data: Union[CreateQueryJson, None] = Field(
False, description="The data payload result after attempting to create a query."
)
@@ -1,4 +1,5 @@
from typing import Optional, List, Any, Union
from typing import Any, List, Optional, Union

from pydantic import BaseModel, Field

from .api_response import ApiResponse
@@ -19,4 +20,7 @@ class QueryResultJson(BaseModel):


class QueryResultResp(ApiResponse):
data: Union[QueryResultJson, None] = Field(False, description="The data payload result after attempting to retrieve the query results.")
data: Union[QueryResultJson, None] = Field(
False,
description="The data payload result after attempting to retrieve the query results.",
)
|
||||
from typing import Optional
|
||||
from typing import Optional, Union
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class Query(BaseModel):
|
||||
sql: str = Field(None, description="SQL query to execute")
|
||||
ttl_minutes: Optional[int] = Field(None, description="The number of minutes to cache the query results")
|
||||
timeout_minutes: Optional[int] = Field(None, description="The number of minutes to timeout the query")
|
||||
retry_interval_seconds: Optional[int] = Field(1, description="The number of seconds to use between retries")
|
||||
cached: Optional[bool] = Field(None, description="An override on the cahce. A value of true will reexecute the query.")
|
||||
ttl_minutes: Optional[int] = Field(
|
||||
None, description="The number of minutes to cache the query results"
|
||||
)
|
||||
timeout_minutes: Optional[int] = Field(
|
||||
None, description="The number of minutes to timeout the query"
|
||||
)
|
||||
retry_interval_seconds: Optional[Union[int, float]] = Field(
|
||||
1, description="The number of seconds to use between retries"
|
||||
)
|
||||
cached: Optional[bool] = Field(
|
||||
None,
|
||||
description="An override on the cahce. A value of true will reexecute the query.",
|
||||
)
|
||||
page_size: int = Field(None, description="The number of results to return per page")
|
||||
page_number: int = Field(None, description="The page number to return")
|
||||
|
||||
@ -2,9 +2,15 @@ from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class QueryDefaults(BaseModel):
|
||||
ttl_minutes: int = Field(None, description="The number of minutes to cache the query results")
|
||||
ttl_minutes: int = Field(
|
||||
None, description="The number of minutes to cache the query results"
|
||||
)
|
||||
cached: bool = Field(False, description="Whether or not to cache the query results")
|
||||
timeout_minutes: int = Field(None, description="The number of minutes to timeout the query")
|
||||
retry_interval_seconds: float = Field(None, description="The number of seconds to wait before retrying the query")
|
||||
timeout_minutes: int = Field(
|
||||
None, description="The number of minutes to timeout the query"
|
||||
)
|
||||
retry_interval_seconds: float = Field(
|
||||
None, description="The number of seconds to wait before retrying the query"
|
||||
)
|
||||
page_size: int = Field(None, description="The number of results to return per page")
|
||||
page_number: int = Field(None, description="The page number to return")
|
||||
|
||||
@ -1,15 +1,26 @@
|
||||
from typing import Optional, List, Any, Union
|
||||
from pydantic import BaseModel, Field, UUID4, PrivateAttr
|
||||
from typing import Any, List, Union
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from .query_run_stats import QueryRunStats
|
||||
|
||||
|
||||
class QueryResultSet(BaseModel):
|
||||
query_id: Union[str, None] = Field(None, description="The server id of the query")
|
||||
status: str = Field(False, description="The status of the query (`PENDING`, `FINISHED`, `ERROR`)")
|
||||
columns: Union[List[str], None] = Field(None, description="The names of the columns in the result set")
|
||||
column_types: Union[List[str], None] = Field(None, description="The type of the columns in the result set")
|
||||
status: str = Field(
|
||||
False, description="The status of the query (`PENDING`, `FINISHED`, `ERROR`)"
|
||||
)
|
||||
columns: Union[List[str], None] = Field(
|
||||
None, description="The names of the columns in the result set"
|
||||
)
|
||||
column_types: Union[List[str], None] = Field(
|
||||
None, description="The type of the columns in the result set"
|
||||
)
|
||||
rows: Union[List[Any], None] = Field(None, description="The results of the query")
|
||||
run_stats: Union[QueryRunStats, None] = Field(None, description="Summary stats on the query run (i.e. the number of rows returned, the elapsed time, etc)")
|
||||
records: Union[List[Any], None] = Field(None, description="The results of the query transformed as an array of objects")
|
||||
run_stats: Union[QueryRunStats, None] = Field(
|
||||
None,
|
||||
description="Summary stats on the query run (i.e. the number of rows returned, the elapsed time, etc)",
|
||||
)
|
||||
records: Union[List[Any], None] = Field(
|
||||
None, description="The results of the query transformed as an array of objects"
|
||||
)
|
||||
error: Any
|
||||
|
||||
@ -1,10 +1,15 @@
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class QueryRunStats(BaseModel):
|
||||
started_at: datetime = Field(None, description="The start time of the query run.")
|
||||
ended_at: datetime = Field(None, description="The end time of the query run.")
|
||||
elapsed_seconds: int = Field(None, description="The number of seconds elapsed between the start and end times.")
|
||||
record_count: int = Field(False, description="The number of records returned by the query.")
|
||||
|
||||
elapsed_seconds: int = Field(
|
||||
None,
|
||||
description="The number of seconds elapsed between the start and end times.",
|
||||
)
|
||||
record_count: int = Field(
|
||||
False, description="The number of records returned by the query."
|
||||
)
|
||||
|
||||
@ -1,8 +1,9 @@
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, Union
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class SleepConfig(BaseModel):
|
||||
attempts: int
|
||||
timeout_minutes: int
|
||||
timeout_minutes: Union[int, float]
|
||||
interval_seconds: Optional[float]
|
||||
|
||||
@ -1,26 +1,34 @@
|
||||
from .models import Query
|
||||
from .api import API
|
||||
from .integrations import (
|
||||
QueryIntegration
|
||||
)
|
||||
from .integrations import QueryIntegration
|
||||
from .models import Query
|
||||
|
||||
API_BASE_URL = "https://api.flipsidecrypto.com"
|
||||
|
||||
|
||||
class ShroomDK(object):
|
||||
|
||||
def __init__(self, api_key: str, api_base_url: str = API_BASE_URL):
|
||||
self.api = API(api_base_url, api_key)
|
||||
|
||||
def query(self, sql, ttl_minutes=60, cached=True, timeout_minutes=20, retry_interval_seconds=0.5, page_size=100000, page_number=1):
|
||||
def query(
|
||||
self,
|
||||
sql,
|
||||
ttl_minutes=60,
|
||||
cached=True,
|
||||
timeout_minutes=20,
|
||||
retry_interval_seconds=0.5,
|
||||
page_size=100000,
|
||||
page_number=1,
|
||||
):
|
||||
query_integration = QueryIntegration(self.api)
|
||||
|
||||
return query_integration.run(Query(
|
||||
sql=sql,
|
||||
ttl_minutes=ttl_minutes,
|
||||
timeout_minutes=timeout_minutes,
|
||||
retry_interval_seconds=retry_interval_seconds,
|
||||
page_size=page_size,
|
||||
page_number=page_number,
|
||||
cached=cached
|
||||
))
|
||||
return query_integration.run(
|
||||
Query(
|
||||
sql=sql,
|
||||
ttl_minutes=ttl_minutes,
|
||||
timeout_minutes=timeout_minutes,
|
||||
retry_interval_seconds=retry_interval_seconds,
|
||||
page_size=page_size,
|
||||
page_number=page_number,
|
||||
cached=cached,
|
||||
)
|
||||
)
|
||||
|
||||
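For reference, a minimal usage sketch of the reformatted ShroomDK entry point (not part of the commit; the key is a placeholder and the SQL is throwaway):

# Illustrative sketch, not part of the commit.
from shroomdk import ShroomDK

sdk = ShroomDK("YOUR_API_KEY")  # placeholder key
result_set = sdk.query(
    "SELECT 1",                 # throwaway SQL
    ttl_minutes=60,
    page_size=100,
    page_number=1,
)
print(result_set.run_stats)
print(result_set.records)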
@@ -1,25 +1,17 @@
import pytest
import requests
import json

from shroomdk.api import API
from shroomdk.models import (
Query,
QueryStatus
)
from shroomdk.models.api import (
QueryResultJson
)
from shroomdk.errors import (
UserError,
QueryRunExecutionError,
QueryRunTimeoutError,
QueryRunExecutionError,
QueryRunTimeoutError,
SDKError,
ServerError
ServerError,
UserError,
)
from shroomdk.integrations.query_integration import QueryIntegration
from shroomdk.integrations.query_integration.query_integration import (
DEFAULTS
)
from shroomdk.integrations.query_integration.query_integration import DEFAULTS
from shroomdk.models import Query, QueryStatus
from shroomdk.models.api import QueryResultJson


def get_api():
@@ -30,20 +22,19 @@ def test_query_defaults():
qi = QueryIntegration(get_api())

# Test that the defaults are semi-overridden
q = Query(sql="", ttl_minutes=5, page_number=5, page_size=10)
q = Query(sql="", ttl_minutes=5, page_number=5, page_size=10)  # type: ignore
next_q = qi._set_query_defaults(q)

assert next_q.page_number == 5
assert next_q.page_size == 10
assert next_q.ttl_minutes == 5
assert next_q.cached == DEFAULTS.cached
assert next_q.timeout_minutes == DEFAULTS.timeout_minutes

# Test that the defaults are not overridden
q = Query(sql="")
q = Query(sql="")  # type: ignore
next_q = qi._set_query_defaults(q)

assert next_q.page_number == DEFAULTS.page_number
assert next_q.page_size == DEFAULTS.page_size
assert next_q.ttl_minutes == DEFAULTS.ttl_minutes
@@ -56,54 +47,50 @@ def test_run_failed_to_create_query(requests_mock):
qi = QueryIntegration(api)

# Test 400 error
q = Query(sql="", ttl_minutes=5, page_number=5, page_size=10)
result = requests_mock.post(
q = Query(sql="", ttl_minutes=5, page_number=5, page_size=10)  # type: ignore
requests_mock.post(
api.get_url("queries"),
text=json.dumps({"errors": "user_error"}),
status_code=400,
reason="User Error"
reason="User Error",
)

try:
result = qi.run(q)
qi.run(q)
except UserError as e:
assert type(e) == UserError

# Test 500 error
result = requests_mock.post(
requests_mock.post(
api.get_url("queries"),
text=json.dumps({"errors": "server_error"}),
status_code=500,
reason="Server Error"
reason="Server Error",
)

try:
result = qi.run(q)
qi.run(q)
except ServerError as e:
assert type(e) == ServerError

# Unknown SDK Error
result = requests_mock.post(
requests_mock.post(
api.get_url("queries"),
text=json.dumps({"errors": "unknown_error"}),
status_code=300,
reason="Unknown Error"
reason="Unknown Error",
)

try:
result = qi.run(q)
qi.run(q)
except SDKError as e:
assert type(e) == SDKError

# No query run data
result = requests_mock.post(
api.get_url("queries"),
status_code=200,
reason="OK"
)
requests_mock.post(api.get_url("queries"), status_code=200, reason="OK")

try:
result = qi.run(q)
qi.run(q)
except SDKError as e:
assert type(e) == SDKError

@@ -124,7 +111,7 @@ def test_get_query_results(requests_mock):
api.get_url(f"queries/{query_id}"),
text=json.dumps(query_result_json),
status_code=200,
reason="OK"
reason="OK",
)

try:
@@ -134,7 +121,7 @@ def test_get_query_results(requests_mock):
page_size=page_size,
attempts=0,
timeout_minutes=1,
retry_interval_seconds=0.0001
retry_interval_seconds=0.0001,
)
except QueryRunExecutionError as e:
assert type(e) == QueryRunExecutionError
@@ -146,7 +133,7 @@ def test_get_query_results(requests_mock):
api.get_url(f"queries/{query_id}"),
text=json.dumps(query_result_json),
status_code=200,
reason="OK"
reason="OK",
)

result = qi._get_query_results(
@@ -155,9 +142,11 @@ def test_get_query_results(requests_mock):
page_size=page_size,
attempts=0,
timeout_minutes=1,
retry_interval_seconds=0.0001
retry_interval_seconds=0.0001,
)
assert len(result.results) == len(query_result_json['results'])
assert result.results is not None
assert type(result.results) is list
assert len(result.results) == len(query_result_json["results"])

# Query Execution Error
query_result_json = getQueryResultSetData(QueryStatus.Error).dict()
@@ -166,7 +155,7 @@ def test_get_query_results(requests_mock):
api.get_url(f"queries/{query_id}"),
text=json.dumps(query_result_json),
status_code=200,
reason="OK"
reason="OK",
)

try:
@@ -181,7 +170,7 @@ def test_get_query_results(requests_mock):
api.get_url(f"queries/{query_id}"),
text=json.dumps(query_result_json),
status_code=200,
reason="OK"
reason="OK",
)

try:
@@ -191,16 +180,14 @@ def test_get_query_results(requests_mock):
page_size=page_size,
attempts=0,
timeout_minutes=0.1,
retry_interval_seconds=0.0001
retry_interval_seconds=0.0001,
)
except QueryRunTimeoutError as e:
assert type(e) == QueryRunTimeoutError

# User Error
result = requests_mock.get(
api.get_url(f"queries/{query_id}"),
status_code=400,
reason="user_error"
api.get_url(f"queries/{query_id}"), status_code=400, reason="user_error"
)

try:
@@ -210,9 +197,7 @@ def test_get_query_results(requests_mock):

# Server Error
result = requests_mock.get(
api.get_url(f"queries/{query_id}"),
status_code=500,
reason="server error"
api.get_url(f"queries/{query_id}"), status_code=500, reason="server error"
)

try:
@@ -222,9 +207,7 @@ def test_get_query_results(requests_mock):

# SDK Error
result = requests_mock.get(
api.get_url(f"queries/{query_id}"),
status_code=200,
reason="ok"
api.get_url(f"queries/{query_id}"), status_code=200, reason="ok"
)

try:
@@ -256,5 +239,5 @@ def getQueryResultSetData(status: str) -> QueryResultJson:
message="",
errors=None,
pageSize=100,
pageNumber=0
)
pageNumber=0,
)
@@ -1,10 +1,11 @@
from shroomdk.integrations.query_integration.query_result_set_builder import QueryResultSetBuilder
from shroomdk.models.api import (
QueryResultJson
)
from shroomdk.models.query_status import QueryStatus
from datetime import datetime

from shroomdk.integrations.query_integration.query_result_set_builder import (
QueryResultSetBuilder,
)
from shroomdk.models.api import QueryResultJson
from shroomdk.models.query_status import QueryStatus


def getQueryResultSetData(status: str) -> QueryResultJson:
return QueryResultJson(
@@ -29,7 +30,7 @@ def getQueryResultSetData(status: str) -> QueryResultJson:
message="",
errors=None,
pageSize=100,
pageNumber=0
pageNumber=0,
)


@@ -51,10 +52,15 @@ def test_records():
qr = QueryResultSetBuilder(getQueryResultSetData(QueryStatus.Finished))

# Records Length Matches Row Length?
assert qr.records is not None
assert qr.rows is not None
assert qr.columns is not None
assert len(qr.records) == len(qr.rows)

# Column Length Matches Records Key Length
for record in qr.records:
assert record is not None

assert len(record.keys()) == len(qr.columns)

# Columns = Record Keys
@@ -1,8 +1,8 @@
from shroomdk.models.query_status import (
QueryStatus,
QueryStatusError,
QueryStatusFinished,
QueryStatusPending,
QueryStatusError,
QueryStatus
)
@@ -1,12 +1,9 @@
import json

from shroomdk.api import API
from shroomdk.models import (
Query,
QueryStatus
)
from shroomdk.models.api import (
QueryResultJson,
)
from shroomdk.models import Query, QueryStatus
from shroomdk.models.api import QueryResultJson


def test_create_query_success(requests_mock):
api = API("https://api.flipsidecrypto.xyz", "api_key")
@@ -15,17 +12,16 @@ def test_create_query_success(requests_mock):
api.get_url("queries"),
text=json.dumps({"token": "mytoken", "cached": False}),
status_code=200,
reason="OK"
reason="OK",
)

q = Query(
sql="SELECT * FROM mytable",
ttl_minutes=5
)
q = Query(sql="SELECT * FROM mytable", ttl_minutes=5)  # type: ignore

result = api.create_query(q)

assert result.data is not None
assert result.data.token == "mytoken"
assert result.data.cached == False
assert result.data.cached is False
assert result.status_code == 200


@@ -36,13 +32,10 @@ def test_create_query_user_error(requests_mock):
api.get_url("queries"),
text=json.dumps({"errors": "user_error"}),
status_code=400,
reason="User Error"
reason="User Error",
)

q = Query(
sql="SELECT * FROM mytable",
ttl_minutes=5
)
q = Query(sql="SELECT * FROM mytable", ttl_minutes=5)  # type: ignore

result = api.create_query(q)
assert result.data is None
@@ -55,18 +48,16 @@ def test_create_query_server_error(requests_mock):
api = API("https://api.flipsidecrypto.xyz", "api_key")

result = requests_mock.post(
api.get_url("queries"),
status_code=500,
reason="Server Error"
api.get_url("queries"), status_code=500, reason="Server Error"
)

q = Query(sql="SELECT * FROM mytable", ttl_minutes=5)
q = Query(sql="SELECT * FROM mytable", ttl_minutes=5)  # type: ignore

result = api.create_query(q)
assert result.data is None
assert result.status_msg == "Server Error"
assert result.status_code == 500
assert result.error_msg == None
assert result.error_msg is None


def getQueryResultSetData(status: str) -> QueryResultJson:
@@ -92,7 +83,7 @@ def getQueryResultSetData(status: str) -> QueryResultJson:
message="",
errors=None,
pageSize=100,
pageNumber=0
)
pageNumber=0,
)

@@ -108,11 +99,11 @@ def test_get_query_result(requests_mock):
api.get_url(f"queries/{query_id}"),
text=json.dumps(query_result_json),
status_code=200,
reason="OK"
reason="OK",
)

result = api.get_query_result(query_id, page_number, page_size)
assert result.data != None
assert result.data is not None
assert result.status_code == 200


@@ -126,11 +117,11 @@ def test_get_query_result_user_error(requests_mock):
api.get_url(f"queries/{query_id}"),
text=json.dumps({"errors": "user_error"}),
status_code=400,
reason="User Error"
reason="User Error",
)

result = api.get_query_result(query_id, page_number, page_size)
assert result.data == None
assert result.data is None
assert result.status_msg == "User Error"
assert result.status_code == 400
assert result.error_msg == "user_error"
@@ -143,13 +134,11 @@ def test_get_query_result_server_error(requests_mock):
page_size = 10

result = requests_mock.get(
api.get_url(f"queries/{query_id}"),
status_code=500,
reason="Server Error"
api.get_url(f"queries/{query_id}"), status_code=500, reason="Server Error"
)

result = api.get_query_result(query_id, page_number, page_size)
assert result.data == None
assert result.data is None
assert result.status_msg == "Server Error"
assert result.status_code == 500
assert result.error_msg is None
@@ -1,12 +1,12 @@
from shroomdk.models.sleep_config import SleepConfig
from shroomdk.utils.sleep import (
sec_to_ms,
exp_backoff,
get_elapsed_exp_seconds,
get_elapsed_linear_seconds,
get_exp_backoff_seconds,
get_linear_backoff_seconds,
get_elapsed_exp_seconds,
exp_backoff,
get_elapsed_linear_seconds,
linear_backoff
linear_backoff,
sec_to_ms,
)


@@ -46,6 +46,7 @@ def test_exp_backoff():
should_continue = exp_backoff(sc)
assert should_continue is False


def test_get_elapsed_linear_seconds():
sc = SleepConfig(attempts=0, interval_seconds=5, timeout_minutes=5)
assert get_elapsed_linear_seconds(sc) == 0
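These tests only touch the public surface of the sleep helpers. A hypothetical sketch of a linear-backoff implementation consistent with the test above (the real code lives in shroomdk/utils/sleep.py and may differ):

# Hypothetical sketch consistent with the tests above; not the actual implementation.
import time

from shroomdk.models.sleep_config import SleepConfig


def get_elapsed_linear_seconds(config: SleepConfig) -> float:
    # Linear strategy: every attempt sleeps the same interval, so elapsed time
    # is attempts * interval (0 seconds when attempts == 0, as the test expects).
    return config.attempts * (config.interval_seconds or 0)


def linear_backoff(config: SleepConfig) -> bool:
    # Stop retrying once the accumulated sleep time reaches the timeout;
    # otherwise sleep one interval and tell the caller to try again.
    if get_elapsed_linear_seconds(config) >= config.timeout_minutes * 60:
        return False
    time.sleep(config.interval_seconds or 0)
    return True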