Add support for pydantic 2.0

Jim Myers 2024-03-07 17:02:51 -05:00
parent 8b98a4b924
commit 39a7e70b11
13 changed files with 88 additions and 72 deletions

.gitignore
View File

@@ -32,3 +32,4 @@ r/shroomDK_0.1.0.tar.gz
 python-sdk-example.py
 r/shroomDK/api_key.txt
 r/shroomDK/test_of_page2_issue.R
+test-script.py

View File

@@ -1,2 +1,2 @@
-pydantic==1.10.9
+pydantic==2.6.3
 requests==2.29.0
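
The dependency pin moves from the pydantic 1.10 line to 2.6.3. A quick runtime sanity check (a sketch, not part of this commit) that the installed version matches the new major:

    import pydantic

    # VERSION is exposed as a string by both pydantic 1.x and 2.x.
    assert pydantic.VERSION.startswith("2."), f"expected pydantic 2.x, got {pydantic.VERSION}"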

View File

@@ -1,5 +1,4 @@
-from typing import List, Union
+from typing import List, Optional, Union
 from pydantic import BaseModel
 from .core.query_run import QueryRun
@@ -11,16 +10,13 @@ from .core.rpc_response import RpcResponse
 class CancelQueryRunRpcRequestParams(BaseModel):
     queryRunId: str
 class CancelQueryRunRpcRequest(RpcRequest):
     method: str = "cancelQueryRun"
     params: List[CancelQueryRunRpcRequestParams]
 # Response
 class CancelQueryRunRpcResult(BaseModel):
     queryRun: QueryRun
 class CancelQueryRunRpcResponse(RpcResponse):
-    result: Union[CancelQueryRunRpcResult, None]
+    result: Optional[CancelQueryRunRpcResult]

View File

@@ -11,23 +11,23 @@ class QueryRun(BaseModel):
     sqlStatementId: str
     state: str
     path: str
-    fileCount: Optional[int]
-    lastFileNumber: Optional[int]
-    fileNames: Optional[str]
-    errorName: Optional[str]
-    errorMessage: Optional[str]
-    errorData: Optional[Any]
-    dataSourceQueryId: Optional[str]
-    dataSourceSessionId: Optional[str]
-    startedAt: Optional[str]
-    queryRunningEndedAt: Optional[str]
-    queryStreamingEndedAt: Optional[str]
-    endedAt: Optional[str]
-    rowCount: Optional[int]
-    totalSize: Optional[int]
+    fileCount: Optional[int] = None
+    lastFileNumber: Optional[int] = None
+    fileNames: Optional[str] = None
+    errorName: Optional[str] = None
+    errorMessage: Optional[str] = None
+    errorData: Optional[Any] = None
+    dataSourceQueryId: Optional[str] = None
+    dataSourceSessionId: Optional[str] = None
+    startedAt: Optional[str] = None
+    queryRunningEndedAt: Optional[str] = None
+    queryStreamingEndedAt: Optional[str] = None
+    endedAt: Optional[str] = None
+    rowCount: Optional[int] = None
+    totalSize: Optional[int] = None
     tags: Tags
     dataSourceId: str
     userId: str
     createdAt: str
     updatedAt: datetime
-    archivedAt: Optional[datetime]
+    archivedAt: Optional[datetime] = None
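
The "= None" defaults added above reflect a pydantic 2 behavior change: in v1 a field annotated Optional with no default was implicitly optional, while in v2 it is required unless a default is given. A minimal sketch of that difference (hypothetical Example model with illustrative field names, run against pydantic 2.x):

    from typing import Optional

    from pydantic import BaseModel, ValidationError


    class Example(BaseModel):
        # pydantic v1 treated this as optional with an implicit None default;
        # pydantic v2 treats it as required (None must be passed explicitly).
        file_count: Optional[int]
        # The pattern used in QueryRun above: optional with an explicit default.
        row_count: Optional[int] = None


    print(Example(file_count=None))   # ok: file_count=None row_count=None
    try:
        Example()                     # file_count missing
    except ValidationError as exc:
        print(exc)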

View File

@@ -6,4 +6,4 @@ from pydantic import BaseModel
 class RpcError(BaseModel):
     code: int
     message: str
-    data: Optional[Any]
+    data: Optional[Any] = None

View File

@@ -9,4 +9,4 @@ class RpcResponse(BaseModel):
     jsonrpc: str
     id: int
     result: Union[Optional[Dict[str, Any]], None]
-    error: Optional[RpcError]
+    error: Optional[Union[RpcError, None]] = None
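
Side note on the annotation above: Optional[Union[RpcError, None]], Union[RpcError, None], and Optional[RpcError] are equivalent, since typing flattens the nested union and drops the duplicate None, so pydantic validates them identically. A small check (int standing in for RpcError so the snippet is self-contained):

    from typing import Optional, Union

    # typing flattens nested unions and removes duplicate members,
    # so all three spellings evaluate to the same type.
    assert Optional[Union[int, None]] == Union[int, None] == Optional[int]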

View File

@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional
+from typing import Optional
 from pydantic import BaseModel

View File

@@ -5,6 +5,6 @@ from pydantic import BaseModel
 class Tags(BaseModel):
-    sdk_package: Optional[str]
-    sdk_version: Optional[str]
-    sdk_language: Optional[str]
+    sdk_package: Optional[str] = None
+    sdk_version: Optional[str] = None
+    sdk_language: Optional[str] = None

View File

@@ -1,5 +1,4 @@
-from typing import Any, Dict, List, Union
+from typing import List, Optional, Any
 from pydantic import BaseModel
 from .core.query_request import QueryRequest
@@ -19,18 +18,17 @@ class CreateQueryRunRpcParams(BaseModel):
     dataSource: str
     dataProvider: str
 class CreateQueryRunRpcRequest(RpcRequest):
     method: str = "createQueryRun"
     params: List[CreateQueryRunRpcParams]
 # Response
 class CreateQueryRunRpcResult(BaseModel):
     queryRequest: QueryRequest
     queryRun: QueryRun
     sqlStatement: SqlStatement
 class CreateQueryRunRpcResponse(RpcResponse):
-    result: Union[CreateQueryRunRpcResult, None]
+    id: Optional[Any]
+    jsonrpc: Optional[str]
+    result: Optional[CreateQueryRunRpcResult]

View File

@@ -1,10 +1,11 @@
 from typing import Optional, Union
 from pydantic import BaseModel, Field
 class Query(BaseModel):
-    sql: str = Field(None, description="SQL query to execute")
+    sql: Optional[str] = Field(
+        None, description="SQL query to execute"
+    )
     ttl_minutes: Optional[int] = Field(
         None, description="The number of minutes to cache the query results"
     )
@@ -21,8 +22,12 @@ class Query(BaseModel):
         None,
         description="An override on the cache. A value of true will Re-Execute the query.",
     )
-    page_size: int = Field(None, description="The number of results to return per page")
-    page_number: int = Field(None, description="The page number to return")
+    page_size: Optional[int] = Field(
+        None, description="The number of results to return per page"
+    )
+    page_number: Optional[int] = Field(
+        None, description="The page number to return"
+    )
     sdk_package: Optional[str] = Field(
         None, description="The SDK package used for the query"
     )

View File

@@ -1,20 +1,26 @@
 from typing import Optional
 from pydantic import BaseModel, Field
 class QueryDefaults(BaseModel):
-    ttl_minutes: int = Field(
+    ttl_minutes: Optional[int] = Field(
         None, description="The number of minutes to cache the query results"
     )
-    max_age_minutes: int = Field(
+    max_age_minutes: Optional[int] = Field(
         None,
         description="The max age of query results to accept before deciding to run a query again",
     )
-    cached: bool = Field(False, description="Whether or not to cache the query results")
-    timeout_minutes: int = Field(
+    cached: bool = Field(
+        False, description="Whether or not to cache the query results"
+    )
+    timeout_minutes: Optional[int] = Field(
         None, description="The number of minutes to timeout the query"
     )
-    retry_interval_seconds: float = Field(
+    retry_interval_seconds: Optional[float] = Field(
         None, description="The number of seconds to wait before retrying the query"
     )
-    page_size: int = Field(None, description="The number of results to return per page")
-    page_number: int = Field(None, description="The page number to return")
+    page_size: Optional[int] = Field(
+        None, description="The number of results to return per page"
+    )
+    page_number: Optional[int] = Field(
+        None, description="The page number to return"
+    )

View File

@@ -1,5 +1,4 @@
-from typing import Any, List, Union
+from typing import Any, List, Optional
 from pydantic import BaseModel, Field
 from .compass.core.page_stats import PageStats
@@ -7,26 +6,31 @@ from .query_run_stats import QueryRunStats
 class QueryResultSet(BaseModel):
-    query_id: Union[str, None] = Field(None, description="The server id of the query")
-    status: str = Field(
-        False, description="The status of the query (`PENDING`, `FINISHED`, `ERROR`)"
+    query_id: Optional[str] = Field(
+        None, description="The server id of the query"
     )
-    columns: Union[List[str], None] = Field(
+    status: str = Field(
+        ..., description="The status of the query (`PENDING`, `FINISHED`, `ERROR`)"
+    )
+    columns: Optional[List[str]] = Field(
         None, description="The names of the columns in the result set"
     )
-    column_types: Union[List[str], None] = Field(
+    column_types: Optional[List[str]] = Field(
         None, description="The type of the columns in the result set"
     )
-    rows: Union[List[Any], None] = Field(None, description="The results of the query")
-    run_stats: Union[QueryRunStats, None] = Field(
+    rows: Optional[List[Any]] = Field(
+        None, description="The results of the query"
+    )
+    run_stats: Optional[QueryRunStats] = Field(
         None,
         description="Summary stats on the query run (i.e. the number of rows returned, the elapsed time, etc)",
     )
-    records: Union[List[Any], None] = Field(
+    records: Optional[List[Any]] = Field(
         None, description="The results of the query transformed as an array of objects"
     )
-    page: Union[PageStats, None] = Field(
+    page: Optional[PageStats] = Field(
         None, description="Summary of page stats for this query result set"
     )
-    error: Any
+    error: Any = Field(
+        ..., description="The error information if the query fails"
+    )
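
In the Field() calls above, the first positional argument is the default: Field(None, ...) keeps a field optional with a None default, while Field(..., ...) (Ellipsis) marks it required, which is why status and error take ... after this change. A trimmed sketch of the same pattern (two fields borrowed from QueryResultSet, pydantic 2.x assumed):

    from typing import Optional

    from pydantic import BaseModel, Field, ValidationError


    class ResultSketch(BaseModel):
        # Default of None: the field may be omitted.
        query_id: Optional[str] = Field(None, description="The server id of the query")
        # Ellipsis default: the field is required.
        status: str = Field(..., description="The status of the query")


    print(ResultSketch(status="FINISHED"))  # query_id falls back to None
    try:
        ResultSketch()                      # status missing -> ValidationError
    except ValidationError as exc:
        print(exc)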

View File

@@ -1,40 +1,46 @@
 from typing import Optional
 from datetime import datetime
 from pydantic import BaseModel, Field
 class QueryRunStats(BaseModel):
-    started_at: datetime = Field(None, description="The start time of the query run.")
-    ended_at: datetime = Field(None, description="The end time of the query run.")
-    query_exec_started_at: datetime = Field(
+    started_at: Optional[datetime] = Field(
+        None, description="The start time of the query run."
+    )
+    ended_at: Optional[datetime] = Field(
+        None, description="The end time of the query run."
+    )
+    query_exec_started_at: Optional[datetime] = Field(
         None, description="The start time of query execution."
     )
-    query_exec_ended_at: datetime = Field(
+    query_exec_ended_at: Optional[datetime] = Field(
         None, description="The end time of query execution."
     )
-    streaming_started_at: datetime = Field(
+    streaming_started_at: Optional[datetime] = Field(
         None, description="The start time of streaming query results."
     )
-    streaming_ended_at: datetime = Field(
+    streaming_ended_at: Optional[datetime] = Field(
         None, description="The end time of streaming query results."
     )
-    elapsed_seconds: int = Field(
+    elapsed_seconds: Optional[int] = Field(
         None,
         description="The number of seconds elapsed between the start and end times.",
     )
-    queued_seconds: int = Field(
+    queued_seconds: Optional[int] = Field(
         None,
         description="The number of seconds elapsed between when the query was created and when execution on the data source began.",
     )
-    streaming_seconds: int = Field(
+    streaming_seconds: Optional[int] = Field(
         None,
         description="The number of seconds elapsed between when the query execution completed and results were fully streamed to Flipside's servers.",
     )
-    query_exec_seconds: int = Field(
+    query_exec_seconds: Optional[int] = Field(
         None,
         description="The number of seconds elapsed between when the query execution started and when it completed on the data source.",
     )
-    record_count: int = Field(
+    record_count: Optional[int] = Field(
         None, description="The number of records returned by the query."
     )
-    bytes: int = Field(None, description="The number of bytes returned by the query.")
+    bytes: Optional[int] = Field(
+        None, description="The number of bytes returned by the query."
+    )