diff --git a/robosystems_client/api/agent/auto_select_agent.py b/robosystems_client/api/agent/auto_select_agent.py index 3f5f14f..ba6eb8b 100644 --- a/robosystems_client/api/agent/auto_select_agent.py +++ b/robosystems_client/api/agent/auto_select_agent.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -17,13 +17,13 @@ def _get_kwargs( graph_id: str, *, body: AgentRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, + mode: None | ResponseMode | Unset = UNSET, ) -> dict[str, Any]: headers: dict[str, Any] = {} params: dict[str, Any] = {} - json_mode: Union[None, Unset, str] + json_mode: None | str | Unset if isinstance(mode, Unset): json_mode = UNSET elif isinstance(mode, ResponseMode): @@ -49,8 +49,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AgentResponse | Any | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = AgentResponse.from_dict(response.json()) @@ -89,8 +89,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AgentResponse | Any | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -104,8 +104,8 @@ def sync_detailed( *, client: AuthenticatedClient, body: AgentRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, -) -> Response[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + mode: None | ResponseMode | Unset = UNSET, +) -> Response[AgentResponse | Any | ErrorResponse | 
HTTPValidationError]: r"""Auto-select agent for query Automatically select the best agent for your query with intelligent execution strategy. @@ -169,8 +169,7 @@ def sync_detailed( Args: graph_id (str): - mode (Union[None, ResponseMode, Unset]): Override execution mode: sync, async, stream, or - auto + mode (None | ResponseMode | Unset): Override execution mode: sync, async, stream, or auto body (AgentRequest): Request model for agent interactions. Raises: @@ -178,7 +177,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]] + Response[AgentResponse | Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -199,8 +198,8 @@ def sync( *, client: AuthenticatedClient, body: AgentRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, -) -> Optional[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + mode: None | ResponseMode | Unset = UNSET, +) -> AgentResponse | Any | ErrorResponse | HTTPValidationError | None: r"""Auto-select agent for query Automatically select the best agent for your query with intelligent execution strategy. @@ -264,8 +263,7 @@ def sync( Args: graph_id (str): - mode (Union[None, ResponseMode, Unset]): Override execution mode: sync, async, stream, or - auto + mode (None | ResponseMode | Unset): Override execution mode: sync, async, stream, or auto body (AgentRequest): Request model for agent interactions. Raises: @@ -273,7 +271,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[AgentResponse, Any, ErrorResponse, HTTPValidationError] + AgentResponse | Any | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -289,8 +287,8 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: AgentRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, -) -> Response[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + mode: None | ResponseMode | Unset = UNSET, +) -> Response[AgentResponse | Any | ErrorResponse | HTTPValidationError]: r"""Auto-select agent for query Automatically select the best agent for your query with intelligent execution strategy. @@ -354,8 +352,7 @@ async def asyncio_detailed( Args: graph_id (str): - mode (Union[None, ResponseMode, Unset]): Override execution mode: sync, async, stream, or - auto + mode (None | ResponseMode | Unset): Override execution mode: sync, async, stream, or auto body (AgentRequest): Request model for agent interactions. Raises: @@ -363,7 +360,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]] + Response[AgentResponse | Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -382,8 +379,8 @@ async def asyncio( *, client: AuthenticatedClient, body: AgentRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, -) -> Optional[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + mode: None | ResponseMode | Unset = UNSET, +) -> AgentResponse | Any | ErrorResponse | HTTPValidationError | None: r"""Auto-select agent for query Automatically select the best agent for your query with intelligent execution strategy. 
@@ -447,8 +444,7 @@ async def asyncio( Args: graph_id (str): - mode (Union[None, ResponseMode, Unset]): Override execution mode: sync, async, stream, or - auto + mode (None | ResponseMode | Unset): Override execution mode: sync, async, stream, or auto body (AgentRequest): Request model for agent interactions. Raises: @@ -456,7 +452,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AgentResponse, Any, ErrorResponse, HTTPValidationError] + AgentResponse | Any | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/agent/batch_process_queries.py b/robosystems_client/api/agent/batch_process_queries.py index 91e82b7..7b632e8 100644 --- a/robosystems_client/api/agent/batch_process_queries.py +++ b/robosystems_client/api/agent/batch_process_queries.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, BatchAgentResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | BatchAgentResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = BatchAgentResponse.from_dict(response.json()) @@ -63,8 +63,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, BatchAgentResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | BatchAgentResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -78,7 +78,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: BatchAgentRequest, -) -> Response[Union[Any, BatchAgentResponse, HTTPValidationError]]: +) -> 
Response[Any | BatchAgentResponse | HTTPValidationError]: """Batch process multiple queries Process multiple queries either sequentially or in parallel. @@ -105,7 +105,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, BatchAgentResponse, HTTPValidationError]] + Response[Any | BatchAgentResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -125,7 +125,7 @@ def sync( *, client: AuthenticatedClient, body: BatchAgentRequest, -) -> Optional[Union[Any, BatchAgentResponse, HTTPValidationError]]: +) -> Any | BatchAgentResponse | HTTPValidationError | None: """Batch process multiple queries Process multiple queries either sequentially or in parallel. @@ -152,7 +152,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, BatchAgentResponse, HTTPValidationError] + Any | BatchAgentResponse | HTTPValidationError """ return sync_detailed( @@ -167,7 +167,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: BatchAgentRequest, -) -> Response[Union[Any, BatchAgentResponse, HTTPValidationError]]: +) -> Response[Any | BatchAgentResponse | HTTPValidationError]: """Batch process multiple queries Process multiple queries either sequentially or in parallel. @@ -194,7 +194,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, BatchAgentResponse, HTTPValidationError]] + Response[Any | BatchAgentResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -212,7 +212,7 @@ async def asyncio( *, client: AuthenticatedClient, body: BatchAgentRequest, -) -> Optional[Union[Any, BatchAgentResponse, HTTPValidationError]]: +) -> Any | BatchAgentResponse | HTTPValidationError | None: """Batch process multiple queries Process multiple queries either sequentially or in parallel. 
@@ -239,7 +239,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, BatchAgentResponse, HTTPValidationError] + Any | BatchAgentResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/agent/execute_specific_agent.py b/robosystems_client/api/agent/execute_specific_agent.py index 286e47f..636f67d 100644 --- a/robosystems_client/api/agent/execute_specific_agent.py +++ b/robosystems_client/api/agent/execute_specific_agent.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -18,13 +18,13 @@ def _get_kwargs( agent_type: str, *, body: AgentRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, + mode: None | ResponseMode | Unset = UNSET, ) -> dict[str, Any]: headers: dict[str, Any] = {} params: dict[str, Any] = {} - json_mode: Union[None, Unset, str] + json_mode: None | str | Unset if isinstance(mode, Unset): json_mode = UNSET elif isinstance(mode, ResponseMode): @@ -50,8 +50,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AgentResponse | Any | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = AgentResponse.from_dict(response.json()) @@ -94,8 +94,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AgentResponse | Any | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -110,8 +110,8 @@ def sync_detailed( *, client: 
AuthenticatedClient, body: AgentRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, -) -> Response[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + mode: None | ResponseMode | Unset = UNSET, +) -> Response[AgentResponse | Any | ErrorResponse | HTTPValidationError]: """Execute specific agent Execute a specific agent type directly with intelligent execution strategy. @@ -134,8 +134,7 @@ def sync_detailed( Args: graph_id (str): agent_type (str): - mode (Union[None, ResponseMode, Unset]): Override execution mode: sync, async, stream, or - auto + mode (None | ResponseMode | Unset): Override execution mode: sync, async, stream, or auto body (AgentRequest): Request model for agent interactions. Raises: @@ -143,7 +142,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]] + Response[AgentResponse | Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -166,8 +165,8 @@ def sync( *, client: AuthenticatedClient, body: AgentRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, -) -> Optional[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + mode: None | ResponseMode | Unset = UNSET, +) -> AgentResponse | Any | ErrorResponse | HTTPValidationError | None: """Execute specific agent Execute a specific agent type directly with intelligent execution strategy. @@ -190,8 +189,7 @@ def sync( Args: graph_id (str): agent_type (str): - mode (Union[None, ResponseMode, Unset]): Override execution mode: sync, async, stream, or - auto + mode (None | ResponseMode | Unset): Override execution mode: sync, async, stream, or auto body (AgentRequest): Request model for agent interactions. Raises: @@ -199,7 +197,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[AgentResponse, Any, ErrorResponse, HTTPValidationError] + AgentResponse | Any | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -217,8 +215,8 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: AgentRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, -) -> Response[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + mode: None | ResponseMode | Unset = UNSET, +) -> Response[AgentResponse | Any | ErrorResponse | HTTPValidationError]: """Execute specific agent Execute a specific agent type directly with intelligent execution strategy. @@ -241,8 +239,7 @@ async def asyncio_detailed( Args: graph_id (str): agent_type (str): - mode (Union[None, ResponseMode, Unset]): Override execution mode: sync, async, stream, or - auto + mode (None | ResponseMode | Unset): Override execution mode: sync, async, stream, or auto body (AgentRequest): Request model for agent interactions. Raises: @@ -250,7 +247,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]] + Response[AgentResponse | Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -271,8 +268,8 @@ async def asyncio( *, client: AuthenticatedClient, body: AgentRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, -) -> Optional[Union[AgentResponse, Any, ErrorResponse, HTTPValidationError]]: + mode: None | ResponseMode | Unset = UNSET, +) -> AgentResponse | Any | ErrorResponse | HTTPValidationError | None: """Execute specific agent Execute a specific agent type directly with intelligent execution strategy. 
@@ -295,8 +292,7 @@ async def asyncio( Args: graph_id (str): agent_type (str): - mode (Union[None, ResponseMode, Unset]): Override execution mode: sync, async, stream, or - auto + mode (None | ResponseMode | Unset): Override execution mode: sync, async, stream, or auto body (AgentRequest): Request model for agent interactions. Raises: @@ -304,7 +300,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AgentResponse, Any, ErrorResponse, HTTPValidationError] + AgentResponse | Any | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/agent/get_agent_metadata.py b/robosystems_client/api/agent/get_agent_metadata.py index 76f5caf..c6fa81a 100644 --- a/robosystems_client/api/agent/get_agent_metadata.py +++ b/robosystems_client/api/agent/get_agent_metadata.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AgentMetadataResponse, Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AgentMetadataResponse | Any | HTTPValidationError | None: if response.status_code == 200: response_200 = AgentMetadataResponse.from_dict(response.json()) @@ -46,8 +46,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AgentMetadataResponse, Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AgentMetadataResponse | Any | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -61,7 +61,7 @@ def sync_detailed( agent_type: str, *, client: AuthenticatedClient, -) -> Response[Union[AgentMetadataResponse, Any, 
HTTPValidationError]]: +) -> Response[AgentMetadataResponse | Any | HTTPValidationError]: """Get agent metadata Get comprehensive metadata for a specific agent type. @@ -85,7 +85,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AgentMetadataResponse, Any, HTTPValidationError]] + Response[AgentMetadataResponse | Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -105,7 +105,7 @@ def sync( agent_type: str, *, client: AuthenticatedClient, -) -> Optional[Union[AgentMetadataResponse, Any, HTTPValidationError]]: +) -> AgentMetadataResponse | Any | HTTPValidationError | None: """Get agent metadata Get comprehensive metadata for a specific agent type. @@ -129,7 +129,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AgentMetadataResponse, Any, HTTPValidationError] + AgentMetadataResponse | Any | HTTPValidationError """ return sync_detailed( @@ -144,7 +144,7 @@ async def asyncio_detailed( agent_type: str, *, client: AuthenticatedClient, -) -> Response[Union[AgentMetadataResponse, Any, HTTPValidationError]]: +) -> Response[AgentMetadataResponse | Any | HTTPValidationError]: """Get agent metadata Get comprehensive metadata for a specific agent type. @@ -168,7 +168,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AgentMetadataResponse, Any, HTTPValidationError]] + Response[AgentMetadataResponse | Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -186,7 +186,7 @@ async def asyncio( agent_type: str, *, client: AuthenticatedClient, -) -> Optional[Union[AgentMetadataResponse, Any, HTTPValidationError]]: +) -> AgentMetadataResponse | Any | HTTPValidationError | None: """Get agent metadata Get comprehensive metadata for a specific agent type. @@ -210,7 +210,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[AgentMetadataResponse, Any, HTTPValidationError] + AgentMetadataResponse | Any | HTTPValidationError """ return ( diff --git a/robosystems_client/api/agent/list_agents.py b/robosystems_client/api/agent/list_agents.py index 5410f1c..592eda0 100644 --- a/robosystems_client/api/agent/list_agents.py +++ b/robosystems_client/api/agent/list_agents.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -13,11 +13,11 @@ def _get_kwargs( graph_id: str, *, - capability: Union[None, Unset, str] = UNSET, + capability: None | str | Unset = UNSET, ) -> dict[str, Any]: params: dict[str, Any] = {} - json_capability: Union[None, Unset, str] + json_capability: None | str | Unset if isinstance(capability, Unset): json_capability = UNSET else: @@ -36,8 +36,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AgentListResponse, Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AgentListResponse | Any | HTTPValidationError | None: if response.status_code == 200: response_200 = AgentListResponse.from_dict(response.json()) @@ -59,8 +59,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AgentListResponse, Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AgentListResponse | Any | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -73,8 +73,8 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, - capability: Union[None, Unset, str] = UNSET, -) -> Response[Union[AgentListResponse, Any, HTTPValidationError]]: + capability: None | str | Unset = UNSET, +) -> Response[AgentListResponse | Any | HTTPValidationError]: """List available 
agents Get a comprehensive list of all available agents with their metadata. @@ -89,7 +89,7 @@ def sync_detailed( Args: graph_id (str): - capability (Union[None, Unset, str]): Filter by capability (e.g., 'financial_analysis', + capability (None | str | Unset): Filter by capability (e.g., 'financial_analysis', 'rag_search') Raises: @@ -97,7 +97,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AgentListResponse, Any, HTTPValidationError]] + Response[AgentListResponse | Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -116,8 +116,8 @@ def sync( graph_id: str, *, client: AuthenticatedClient, - capability: Union[None, Unset, str] = UNSET, -) -> Optional[Union[AgentListResponse, Any, HTTPValidationError]]: + capability: None | str | Unset = UNSET, +) -> AgentListResponse | Any | HTTPValidationError | None: """List available agents Get a comprehensive list of all available agents with their metadata. @@ -132,7 +132,7 @@ def sync( Args: graph_id (str): - capability (Union[None, Unset, str]): Filter by capability (e.g., 'financial_analysis', + capability (None | str | Unset): Filter by capability (e.g., 'financial_analysis', 'rag_search') Raises: @@ -140,7 +140,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AgentListResponse, Any, HTTPValidationError] + AgentListResponse | Any | HTTPValidationError """ return sync_detailed( @@ -154,8 +154,8 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, - capability: Union[None, Unset, str] = UNSET, -) -> Response[Union[AgentListResponse, Any, HTTPValidationError]]: + capability: None | str | Unset = UNSET, +) -> Response[AgentListResponse | Any | HTTPValidationError]: """List available agents Get a comprehensive list of all available agents with their metadata. 
@@ -170,7 +170,7 @@ async def asyncio_detailed( Args: graph_id (str): - capability (Union[None, Unset, str]): Filter by capability (e.g., 'financial_analysis', + capability (None | str | Unset): Filter by capability (e.g., 'financial_analysis', 'rag_search') Raises: @@ -178,7 +178,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AgentListResponse, Any, HTTPValidationError]] + Response[AgentListResponse | Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -195,8 +195,8 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, - capability: Union[None, Unset, str] = UNSET, -) -> Optional[Union[AgentListResponse, Any, HTTPValidationError]]: + capability: None | str | Unset = UNSET, +) -> AgentListResponse | Any | HTTPValidationError | None: """List available agents Get a comprehensive list of all available agents with their metadata. @@ -211,7 +211,7 @@ async def asyncio( Args: graph_id (str): - capability (Union[None, Unset, str]): Filter by capability (e.g., 'financial_analysis', + capability (None | str | Unset): Filter by capability (e.g., 'financial_analysis', 'rag_search') Raises: @@ -219,7 +219,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[AgentListResponse, Any, HTTPValidationError] + AgentListResponse | Any | HTTPValidationError """ return ( diff --git a/robosystems_client/api/agent/recommend_agent.py b/robosystems_client/api/agent/recommend_agent.py index 79e26a9..07f42c2 100644 --- a/robosystems_client/api/agent/recommend_agent.py +++ b/robosystems_client/api/agent/recommend_agent.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AgentRecommendationResponse, Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AgentRecommendationResponse | Any | HTTPValidationError | None: if response.status_code == 200: response_200 = AgentRecommendationResponse.from_dict(response.json()) @@ -55,8 +55,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AgentRecommendationResponse, Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AgentRecommendationResponse | Any | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -70,7 +70,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: AgentRecommendationRequest, -) -> Response[Union[AgentRecommendationResponse, Any, HTTPValidationError]]: +) -> Response[AgentRecommendationResponse | Any | HTTPValidationError]: """Get agent recommendations Get intelligent agent recommendations for a specific query. @@ -97,7 +97,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[AgentRecommendationResponse, Any, HTTPValidationError]] + Response[AgentRecommendationResponse | Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -117,7 +117,7 @@ def sync( *, client: AuthenticatedClient, body: AgentRecommendationRequest, -) -> Optional[Union[AgentRecommendationResponse, Any, HTTPValidationError]]: +) -> AgentRecommendationResponse | Any | HTTPValidationError | None: """Get agent recommendations Get intelligent agent recommendations for a specific query. @@ -144,7 +144,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AgentRecommendationResponse, Any, HTTPValidationError] + AgentRecommendationResponse | Any | HTTPValidationError """ return sync_detailed( @@ -159,7 +159,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: AgentRecommendationRequest, -) -> Response[Union[AgentRecommendationResponse, Any, HTTPValidationError]]: +) -> Response[AgentRecommendationResponse | Any | HTTPValidationError]: """Get agent recommendations Get intelligent agent recommendations for a specific query. @@ -186,7 +186,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AgentRecommendationResponse, Any, HTTPValidationError]] + Response[AgentRecommendationResponse | Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -204,7 +204,7 @@ async def asyncio( *, client: AuthenticatedClient, body: AgentRecommendationRequest, -) -> Optional[Union[AgentRecommendationResponse, Any, HTTPValidationError]]: +) -> AgentRecommendationResponse | Any | HTTPValidationError | None: """Get agent recommendations Get intelligent agent recommendations for a specific query. @@ -231,7 +231,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[AgentRecommendationResponse, Any, HTTPValidationError] + AgentRecommendationResponse | Any | HTTPValidationError """ return ( diff --git a/robosystems_client/api/auth/check_password_strength.py b/robosystems_client/api/auth/check_password_strength.py index 787bb1a..897a860 100644 --- a/robosystems_client/api/auth/check_password_strength.py +++ b/robosystems_client/api/auth/check_password_strength.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -31,8 +31,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, PasswordCheckResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | PasswordCheckResponse | None: if response.status_code == 200: response_200 = PasswordCheckResponse.from_dict(response.json()) @@ -50,8 +50,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, PasswordCheckResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | PasswordCheckResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -62,9 +62,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: PasswordCheckRequest, -) -> Response[Union[HTTPValidationError, PasswordCheckResponse]]: +) -> Response[HTTPValidationError | PasswordCheckResponse]: """Check Password Strength Check password strength and get validation feedback @@ -77,7 +77,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, PasswordCheckResponse]] + Response[HTTPValidationError | PasswordCheckResponse] """ kwargs = _get_kwargs( @@ -93,9 +93,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: PasswordCheckRequest, -) -> Optional[Union[HTTPValidationError, PasswordCheckResponse]]: +) -> HTTPValidationError | PasswordCheckResponse | None: """Check Password Strength Check password strength and get validation feedback @@ -108,7 +108,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, PasswordCheckResponse] + HTTPValidationError | PasswordCheckResponse """ return sync_detailed( @@ -119,9 +119,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: PasswordCheckRequest, -) -> Response[Union[HTTPValidationError, PasswordCheckResponse]]: +) -> Response[HTTPValidationError | PasswordCheckResponse]: """Check Password Strength Check password strength and get validation feedback @@ -134,7 +134,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, PasswordCheckResponse]] + Response[HTTPValidationError | PasswordCheckResponse] """ kwargs = _get_kwargs( @@ -148,9 +148,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: PasswordCheckRequest, -) -> Optional[Union[HTTPValidationError, PasswordCheckResponse]]: +) -> HTTPValidationError | PasswordCheckResponse | None: """Check Password Strength Check password strength and get validation feedback @@ -163,7 +163,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, PasswordCheckResponse] + HTTPValidationError | PasswordCheckResponse """ return ( diff --git a/robosystems_client/api/auth/complete_sso_auth.py b/robosystems_client/api/auth/complete_sso_auth.py index 7c42722..1c63004 100644 --- a/robosystems_client/api/auth/complete_sso_auth.py +++ b/robosystems_client/api/auth/complete_sso_auth.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = AuthResponse.from_dict(response.json()) @@ -56,8 +56,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -68,9 +68,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: SSOCompleteRequest, -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: """Complete SSO Authentication Complete SSO authentication using session ID from secure handoff. @@ -83,7 +83,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]] + Response[AuthResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -99,9 +99,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: SSOCompleteRequest, -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: """Complete SSO Authentication Complete SSO authentication using session ID from secure handoff. @@ -114,7 +114,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AuthResponse, ErrorResponse, HTTPValidationError] + AuthResponse | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -125,9 +125,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: SSOCompleteRequest, -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: """Complete SSO Authentication Complete SSO authentication using session ID from secure handoff. @@ -140,7 +140,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]] + Response[AuthResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -154,9 +154,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: SSOCompleteRequest, -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: """Complete SSO Authentication Complete SSO authentication using session ID from secure handoff. 
@@ -169,7 +169,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AuthResponse, ErrorResponse, HTTPValidationError] + AuthResponse | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/auth/forgot_password.py b/robosystems_client/api/auth/forgot_password.py index a0d2c1e..bfe0eb5 100644 --- a/robosystems_client/api/auth/forgot_password.py +++ b/robosystems_client/api/auth/forgot_password.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -34,10 +34,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[ - Union[ErrorResponse, ForgotPasswordResponseForgotpassword, HTTPValidationError] -]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | ForgotPasswordResponseForgotpassword | HTTPValidationError | None: if response.status_code == 200: response_200 = ForgotPasswordResponseForgotpassword.from_dict(response.json()) @@ -60,9 +58,9 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[ - Union[ErrorResponse, ForgotPasswordResponseForgotpassword, HTTPValidationError] + ErrorResponse | ForgotPasswordResponseForgotpassword | HTTPValidationError ]: return Response( status_code=HTTPStatus(response.status_code), @@ -74,10 +72,10 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: ForgotPasswordRequest, ) -> Response[ - Union[ErrorResponse, ForgotPasswordResponseForgotpassword, HTTPValidationError] + ErrorResponse | ForgotPasswordResponseForgotpassword | HTTPValidationError ]: """Forgot Password @@ -91,7 +89,7 @@ def sync_detailed( httpx.TimeoutException: If the 
request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, ForgotPasswordResponseForgotpassword, HTTPValidationError]] + Response[ErrorResponse | ForgotPasswordResponseForgotpassword | HTTPValidationError] """ kwargs = _get_kwargs( @@ -107,11 +105,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: ForgotPasswordRequest, -) -> Optional[ - Union[ErrorResponse, ForgotPasswordResponseForgotpassword, HTTPValidationError] -]: +) -> ErrorResponse | ForgotPasswordResponseForgotpassword | HTTPValidationError | None: """Forgot Password Request password reset email. Always returns success to prevent email enumeration. @@ -124,7 +120,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, ForgotPasswordResponseForgotpassword, HTTPValidationError] + ErrorResponse | ForgotPasswordResponseForgotpassword | HTTPValidationError """ return sync_detailed( @@ -135,10 +131,10 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: ForgotPasswordRequest, ) -> Response[ - Union[ErrorResponse, ForgotPasswordResponseForgotpassword, HTTPValidationError] + ErrorResponse | ForgotPasswordResponseForgotpassword | HTTPValidationError ]: """Forgot Password @@ -152,7 +148,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, ForgotPasswordResponseForgotpassword, HTTPValidationError]] + Response[ErrorResponse | ForgotPasswordResponseForgotpassword | HTTPValidationError] """ kwargs = _get_kwargs( @@ -166,11 +162,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: ForgotPasswordRequest, -) -> Optional[ - Union[ErrorResponse, ForgotPasswordResponseForgotpassword, HTTPValidationError] -]: +) -> ErrorResponse | ForgotPasswordResponseForgotpassword | HTTPValidationError | None: """Forgot Password Request password reset email. Always returns success to prevent email enumeration. @@ -183,7 +177,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, ForgotPasswordResponseForgotpassword, HTTPValidationError] + ErrorResponse | ForgotPasswordResponseForgotpassword | HTTPValidationError """ return ( diff --git a/robosystems_client/api/auth/generate_sso_token.py b/robosystems_client/api/auth/generate_sso_token.py index 0a1325f..532d3c7 100644 --- a/robosystems_client/api/auth/generate_sso_token.py +++ b/robosystems_client/api/auth/generate_sso_token.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -13,7 +13,7 @@ def _get_kwargs( *, - auth_token: Union[None, Unset, str] = UNSET, + auth_token: None | str | Unset = UNSET, ) -> dict[str, Any]: cookies = {} if auth_token is not UNSET: @@ -29,8 +29,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, HTTPValidationError, SSOTokenResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | HTTPValidationError | SSOTokenResponse | None: if response.status_code == 200: response_200 = SSOTokenResponse.from_dict(response.json()) @@ -53,8 
+53,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, HTTPValidationError, SSOTokenResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | HTTPValidationError | SSOTokenResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -65,22 +65,22 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - auth_token: Union[None, Unset, str] = UNSET, -) -> Response[Union[ErrorResponse, HTTPValidationError, SSOTokenResponse]]: + client: AuthenticatedClient | Client, + auth_token: None | str | Unset = UNSET, +) -> Response[ErrorResponse | HTTPValidationError | SSOTokenResponse]: """Generate SSO Token Generate a temporary SSO token for cross-app authentication. Args: - auth_token (Union[None, Unset, str]): + auth_token (None | str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, SSOTokenResponse]] + Response[ErrorResponse | HTTPValidationError | SSOTokenResponse] """ kwargs = _get_kwargs( @@ -96,22 +96,22 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], - auth_token: Union[None, Unset, str] = UNSET, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SSOTokenResponse]]: + client: AuthenticatedClient | Client, + auth_token: None | str | Unset = UNSET, +) -> ErrorResponse | HTTPValidationError | SSOTokenResponse | None: """Generate SSO Token Generate a temporary SSO token for cross-app authentication. 
Args: - auth_token (Union[None, Unset, str]): + auth_token (None | str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, HTTPValidationError, SSOTokenResponse] + ErrorResponse | HTTPValidationError | SSOTokenResponse """ return sync_detailed( @@ -122,22 +122,22 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - auth_token: Union[None, Unset, str] = UNSET, -) -> Response[Union[ErrorResponse, HTTPValidationError, SSOTokenResponse]]: + client: AuthenticatedClient | Client, + auth_token: None | str | Unset = UNSET, +) -> Response[ErrorResponse | HTTPValidationError | SSOTokenResponse]: """Generate SSO Token Generate a temporary SSO token for cross-app authentication. Args: - auth_token (Union[None, Unset, str]): + auth_token (None | str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, SSOTokenResponse]] + Response[ErrorResponse | HTTPValidationError | SSOTokenResponse] """ kwargs = _get_kwargs( @@ -151,22 +151,22 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], - auth_token: Union[None, Unset, str] = UNSET, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SSOTokenResponse]]: + client: AuthenticatedClient | Client, + auth_token: None | str | Unset = UNSET, +) -> ErrorResponse | HTTPValidationError | SSOTokenResponse | None: """Generate SSO Token Generate a temporary SSO token for cross-app authentication. 
Args: - auth_token (Union[None, Unset, str]): + auth_token (None | str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, HTTPValidationError, SSOTokenResponse] + ErrorResponse | HTTPValidationError | SSOTokenResponse """ return ( diff --git a/robosystems_client/api/auth/get_captcha_config.py b/robosystems_client/api/auth/get_captcha_config.py index 2f93a91..3c4318d 100644 --- a/robosystems_client/api/auth/get_captcha_config.py +++ b/robosystems_client/api/auth/get_captcha_config.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -18,8 +18,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Any]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | None: if response.status_code == 200: return None @@ -30,7 +30,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), @@ -42,7 +42,7 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """Get CAPTCHA Configuration @@ -67,7 +67,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """Get CAPTCHA Configuration diff --git a/robosystems_client/api/auth/get_current_auth_user.py b/robosystems_client/api/auth/get_current_auth_user.py index d210cd8..ddb1870 100644 --- a/robosystems_client/api/auth/get_current_auth_user.py 
+++ b/robosystems_client/api/auth/get_current_auth_user.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, GetCurrentAuthUserResponseGetcurrentauthuser]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | GetCurrentAuthUserResponseGetcurrentauthuser | None: if response.status_code == 200: response_200 = GetCurrentAuthUserResponseGetcurrentauthuser.from_dict( response.json() @@ -43,8 +43,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, GetCurrentAuthUserResponseGetcurrentauthuser]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | GetCurrentAuthUserResponseGetcurrentauthuser]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -55,8 +55,8 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[ErrorResponse, GetCurrentAuthUserResponseGetcurrentauthuser]]: + client: AuthenticatedClient | Client, +) -> Response[ErrorResponse | GetCurrentAuthUserResponseGetcurrentauthuser]: """Get Current User Get the currently authenticated user. @@ -66,7 +66,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, GetCurrentAuthUserResponseGetcurrentauthuser]] + Response[ErrorResponse | GetCurrentAuthUserResponseGetcurrentauthuser] """ kwargs = _get_kwargs() @@ -80,8 +80,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[ErrorResponse, GetCurrentAuthUserResponseGetcurrentauthuser]]: + client: AuthenticatedClient | Client, +) -> ErrorResponse | GetCurrentAuthUserResponseGetcurrentauthuser | None: """Get Current User Get the currently authenticated user. @@ -91,7 +91,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, GetCurrentAuthUserResponseGetcurrentauthuser] + ErrorResponse | GetCurrentAuthUserResponseGetcurrentauthuser """ return sync_detailed( @@ -101,8 +101,8 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[ErrorResponse, GetCurrentAuthUserResponseGetcurrentauthuser]]: + client: AuthenticatedClient | Client, +) -> Response[ErrorResponse | GetCurrentAuthUserResponseGetcurrentauthuser]: """Get Current User Get the currently authenticated user. @@ -112,7 +112,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, GetCurrentAuthUserResponseGetcurrentauthuser]] + Response[ErrorResponse | GetCurrentAuthUserResponseGetcurrentauthuser] """ kwargs = _get_kwargs() @@ -124,8 +124,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[ErrorResponse, GetCurrentAuthUserResponseGetcurrentauthuser]]: + client: AuthenticatedClient | Client, +) -> ErrorResponse | GetCurrentAuthUserResponseGetcurrentauthuser | None: """Get Current User Get the currently authenticated user. @@ -135,7 +135,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, GetCurrentAuthUserResponseGetcurrentauthuser] + ErrorResponse | GetCurrentAuthUserResponseGetcurrentauthuser """ return ( diff --git a/robosystems_client/api/auth/get_password_policy.py b/robosystems_client/api/auth/get_password_policy.py index 3fac1bd..95cc17d 100644 --- a/robosystems_client/api/auth/get_password_policy.py +++ b/robosystems_client/api/auth/get_password_policy.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -19,8 +19,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[PasswordPolicyResponse]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> PasswordPolicyResponse | None: if response.status_code == 200: response_200 = PasswordPolicyResponse.from_dict(response.json()) @@ -33,7 +33,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[PasswordPolicyResponse]: return Response( status_code=HTTPStatus(response.status_code), @@ -45,7 +45,7 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[PasswordPolicyResponse]: """Get Password Policy @@ -70,8 +70,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[PasswordPolicyResponse]: + client: AuthenticatedClient | Client, +) -> PasswordPolicyResponse | None: """Get Password Policy Get current password policy requirements for frontend validation @@ -91,7 +91,7 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[PasswordPolicyResponse]: """Get Password Policy @@ -114,8 +114,8 @@ async def asyncio_detailed( 
async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[PasswordPolicyResponse]: + client: AuthenticatedClient | Client, +) -> PasswordPolicyResponse | None: """Get Password Policy Get current password policy requirements for frontend validation diff --git a/robosystems_client/api/auth/login_user.py b/robosystems_client/api/auth/login_user.py index fc76f7c..caece08 100644 --- a/robosystems_client/api/auth/login_user.py +++ b/robosystems_client/api/auth/login_user.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = AuthResponse.from_dict(response.json()) @@ -61,8 +61,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -73,9 +73,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: LoginRequest, -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: """User Login Authenticate user with email and password. @@ -88,7 +88,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]] + Response[AuthResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -104,9 +104,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: LoginRequest, -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: """User Login Authenticate user with email and password. @@ -119,7 +119,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AuthResponse, ErrorResponse, HTTPValidationError] + AuthResponse | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -130,9 +130,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: LoginRequest, -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: """User Login Authenticate user with email and password. @@ -145,7 +145,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]] + Response[AuthResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -159,9 +159,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: LoginRequest, -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: """User Login Authenticate user with email and password. @@ -174,7 +174,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[AuthResponse, ErrorResponse, HTTPValidationError] + AuthResponse | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/auth/logout_user.py b/robosystems_client/api/auth/logout_user.py index ee6a5de..05b1ec1 100644 --- a/robosystems_client/api/auth/logout_user.py +++ b/robosystems_client/api/auth/logout_user.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -19,8 +19,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[LogoutUserResponseLogoutuser]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> LogoutUserResponseLogoutuser | None: if response.status_code == 200: response_200 = LogoutUserResponseLogoutuser.from_dict(response.json()) @@ -33,7 +33,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[LogoutUserResponseLogoutuser]: return Response( status_code=HTTPStatus(response.status_code), @@ -45,7 +45,7 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[LogoutUserResponseLogoutuser]: """User Logout @@ -70,8 +70,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[LogoutUserResponseLogoutuser]: + client: AuthenticatedClient | Client, +) -> LogoutUserResponseLogoutuser | None: """User Logout Logout user and invalidate session. 
@@ -91,7 +91,7 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[LogoutUserResponseLogoutuser]: """User Logout @@ -114,8 +114,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[LogoutUserResponseLogoutuser]: + client: AuthenticatedClient | Client, +) -> LogoutUserResponseLogoutuser | None: """User Logout Logout user and invalidate session. diff --git a/robosystems_client/api/auth/refresh_auth_session.py b/robosystems_client/api/auth/refresh_auth_session.py index 09bc4cd..f1cb1a7 100644 --- a/robosystems_client/api/auth/refresh_auth_session.py +++ b/robosystems_client/api/auth/refresh_auth_session.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -20,8 +20,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AuthResponse, ErrorResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AuthResponse | ErrorResponse | None: if response.status_code == 200: response_200 = AuthResponse.from_dict(response.json()) @@ -39,8 +39,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AuthResponse, ErrorResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AuthResponse | ErrorResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -51,8 +51,8 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[AuthResponse, ErrorResponse]]: + client: AuthenticatedClient | Client, +) -> Response[AuthResponse | ErrorResponse]: """Refresh Session Refresh authentication session with a new JWT token. 
@@ -62,7 +62,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AuthResponse, ErrorResponse]] + Response[AuthResponse | ErrorResponse] """ kwargs = _get_kwargs() @@ -76,8 +76,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[AuthResponse, ErrorResponse]]: + client: AuthenticatedClient | Client, +) -> AuthResponse | ErrorResponse | None: """Refresh Session Refresh authentication session with a new JWT token. @@ -87,7 +87,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AuthResponse, ErrorResponse] + AuthResponse | ErrorResponse """ return sync_detailed( @@ -97,8 +97,8 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[AuthResponse, ErrorResponse]]: + client: AuthenticatedClient | Client, +) -> Response[AuthResponse | ErrorResponse]: """Refresh Session Refresh authentication session with a new JWT token. @@ -108,7 +108,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AuthResponse, ErrorResponse]] + Response[AuthResponse | ErrorResponse] """ kwargs = _get_kwargs() @@ -120,8 +120,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[AuthResponse, ErrorResponse]]: + client: AuthenticatedClient | Client, +) -> AuthResponse | ErrorResponse | None: """Refresh Session Refresh authentication session with a new JWT token. @@ -131,7 +131,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[AuthResponse, ErrorResponse] + AuthResponse | ErrorResponse """ return ( diff --git a/robosystems_client/api/auth/register_user.py b/robosystems_client/api/auth/register_user.py index b8f6c3e..11732af 100644 --- a/robosystems_client/api/auth/register_user.py +++ b/robosystems_client/api/auth/register_user.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: if response.status_code == 201: response_201 = AuthResponse.from_dict(response.json()) @@ -66,8 +66,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -78,9 +78,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: RegisterRequest, -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: """Register New User Register a new user account with email and password. @@ -101,7 +101,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]] + Response[AuthResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -117,9 +117,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: RegisterRequest, -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: """Register New User Register a new user account with email and password. @@ -140,7 +140,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AuthResponse, ErrorResponse, HTTPValidationError] + AuthResponse | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -151,9 +151,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: RegisterRequest, -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: """Register New User Register a new user account with email and password. @@ -174,7 +174,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]] + Response[AuthResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -188,9 +188,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: RegisterRequest, -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: """Register New User Register a new user account with email and password. @@ -211,7 +211,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[AuthResponse, ErrorResponse, HTTPValidationError] + AuthResponse | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/auth/resend_verification_email.py b/robosystems_client/api/auth/resend_verification_email.py index 5f3d199..31ba154 100644 --- a/robosystems_client/api/auth/resend_verification_email.py +++ b/robosystems_client/api/auth/resend_verification_email.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,10 +22,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[ - Union[ErrorResponse, ResendVerificationEmailResponseResendverificationemail] -]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | ResendVerificationEmailResponseResendverificationemail | None: if response.status_code == 200: response_200 = ResendVerificationEmailResponseResendverificationemail.from_dict( response.json() @@ -55,10 +53,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[ - Union[ErrorResponse, ResendVerificationEmailResponseResendverificationemail] -]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | ResendVerificationEmailResponseResendverificationemail]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -69,10 +65,8 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[ - Union[ErrorResponse, ResendVerificationEmailResponseResendverificationemail] -]: + client: AuthenticatedClient | Client, +) -> Response[ErrorResponse | ResendVerificationEmailResponseResendverificationemail]: """Resend Email Verification Resend verification email to the authenticated user. Rate limited to 3 per hour. 
@@ -82,7 +76,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, ResendVerificationEmailResponseResendverificationemail]] + Response[ErrorResponse | ResendVerificationEmailResponseResendverificationemail] """ kwargs = _get_kwargs() @@ -96,10 +90,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[ - Union[ErrorResponse, ResendVerificationEmailResponseResendverificationemail] -]: + client: AuthenticatedClient | Client, +) -> ErrorResponse | ResendVerificationEmailResponseResendverificationemail | None: """Resend Email Verification Resend verification email to the authenticated user. Rate limited to 3 per hour. @@ -109,7 +101,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, ResendVerificationEmailResponseResendverificationemail] + ErrorResponse | ResendVerificationEmailResponseResendverificationemail """ return sync_detailed( @@ -119,10 +111,8 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[ - Union[ErrorResponse, ResendVerificationEmailResponseResendverificationemail] -]: + client: AuthenticatedClient | Client, +) -> Response[ErrorResponse | ResendVerificationEmailResponseResendverificationemail]: """Resend Email Verification Resend verification email to the authenticated user. Rate limited to 3 per hour. @@ -132,7 +122,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, ResendVerificationEmailResponseResendverificationemail]] + Response[ErrorResponse | ResendVerificationEmailResponseResendverificationemail] """ kwargs = _get_kwargs() @@ -144,10 +134,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[ - Union[ErrorResponse, ResendVerificationEmailResponseResendverificationemail] -]: + client: AuthenticatedClient | Client, +) -> ErrorResponse | ResendVerificationEmailResponseResendverificationemail | None: """Resend Email Verification Resend verification email to the authenticated user. Rate limited to 3 per hour. @@ -157,7 +145,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, ResendVerificationEmailResponseResendverificationemail] + ErrorResponse | ResendVerificationEmailResponseResendverificationemail """ return ( diff --git a/robosystems_client/api/auth/reset_password.py b/robosystems_client/api/auth/reset_password.py index ea67ffd..ca98298 100644 --- a/robosystems_client/api/auth/reset_password.py +++ b/robosystems_client/api/auth/reset_password.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = AuthResponse.from_dict(response.json()) @@ -56,8 +56,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: 
httpx.Response +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -68,9 +68,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: ResetPasswordRequest, -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: """Reset Password Reset password with token from email. Returns JWT for auto-login. @@ -83,7 +83,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]] + Response[AuthResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -99,9 +99,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: ResetPasswordRequest, -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: """Reset Password Reset password with token from email. Returns JWT for auto-login. @@ -114,7 +114,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AuthResponse, ErrorResponse, HTTPValidationError] + AuthResponse | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -125,9 +125,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: ResetPasswordRequest, -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: """Reset Password Reset password with token from email. Returns JWT for auto-login. 
@@ -140,7 +140,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]] + Response[AuthResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -154,9 +154,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: ResetPasswordRequest, -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: """Reset Password Reset password with token from email. Returns JWT for auto-login. @@ -169,7 +169,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AuthResponse, ErrorResponse, HTTPValidationError] + AuthResponse | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/auth/sso_token_exchange.py b/robosystems_client/api/auth/sso_token_exchange.py index 4bb1793..a8921cb 100644 --- a/robosystems_client/api/auth/sso_token_exchange.py +++ b/robosystems_client/api/auth/sso_token_exchange.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, HTTPValidationError, SSOExchangeResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | HTTPValidationError | SSOExchangeResponse | None: if response.status_code == 200: response_200 = SSOExchangeResponse.from_dict(response.json()) @@ -61,8 +61,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, HTTPValidationError, SSOExchangeResponse]]: + *, client: 
AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | HTTPValidationError | SSOExchangeResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -73,9 +73,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: SSOExchangeRequest, -) -> Response[Union[ErrorResponse, HTTPValidationError, SSOExchangeResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | SSOExchangeResponse]: """SSO Token Exchange Exchange SSO token for secure session handoff to target application. @@ -88,7 +88,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, SSOExchangeResponse]] + Response[ErrorResponse | HTTPValidationError | SSOExchangeResponse] """ kwargs = _get_kwargs( @@ -104,9 +104,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: SSOExchangeRequest, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SSOExchangeResponse]]: +) -> ErrorResponse | HTTPValidationError | SSOExchangeResponse | None: """SSO Token Exchange Exchange SSO token for secure session handoff to target application. @@ -119,7 +119,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, HTTPValidationError, SSOExchangeResponse] + ErrorResponse | HTTPValidationError | SSOExchangeResponse """ return sync_detailed( @@ -130,9 +130,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: SSOExchangeRequest, -) -> Response[Union[ErrorResponse, HTTPValidationError, SSOExchangeResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | SSOExchangeResponse]: """SSO Token Exchange Exchange SSO token for secure session handoff to target application. @@ -145,7 +145,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, SSOExchangeResponse]] + Response[ErrorResponse | HTTPValidationError | SSOExchangeResponse] """ kwargs = _get_kwargs( @@ -159,9 +159,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: SSOExchangeRequest, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SSOExchangeResponse]]: +) -> ErrorResponse | HTTPValidationError | SSOExchangeResponse | None: """SSO Token Exchange Exchange SSO token for secure session handoff to target application. @@ -174,7 +174,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, HTTPValidationError, SSOExchangeResponse] + ErrorResponse | HTTPValidationError | SSOExchangeResponse """ return ( diff --git a/robosystems_client/api/auth/validate_reset_token.py b/robosystems_client/api/auth/validate_reset_token.py index 34c8953..8972346 100644 --- a/robosystems_client/api/auth/validate_reset_token.py +++ b/robosystems_client/api/auth/validate_reset_token.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -30,8 +30,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, ResetPasswordValidateResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | ResetPasswordValidateResponse | None: if response.status_code == 200: response_200 = ResetPasswordValidateResponse.from_dict(response.json()) @@ -49,8 +49,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, ResetPasswordValidateResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | ResetPasswordValidateResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -61,9 +61,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, token: str, -) -> Response[Union[HTTPValidationError, ResetPasswordValidateResponse]]: +) -> Response[HTTPValidationError | ResetPasswordValidateResponse]: """Validate Reset Token Check if a password reset token is valid without consuming it. @@ -76,7 +76,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, ResetPasswordValidateResponse]] + Response[HTTPValidationError | ResetPasswordValidateResponse] """ kwargs = _get_kwargs( @@ -92,9 +92,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, token: str, -) -> Optional[Union[HTTPValidationError, ResetPasswordValidateResponse]]: +) -> HTTPValidationError | ResetPasswordValidateResponse | None: """Validate Reset Token Check if a password reset token is valid without consuming it. @@ -107,7 +107,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, ResetPasswordValidateResponse] + HTTPValidationError | ResetPasswordValidateResponse """ return sync_detailed( @@ -118,9 +118,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, token: str, -) -> Response[Union[HTTPValidationError, ResetPasswordValidateResponse]]: +) -> Response[HTTPValidationError | ResetPasswordValidateResponse]: """Validate Reset Token Check if a password reset token is valid without consuming it. @@ -133,7 +133,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, ResetPasswordValidateResponse]] + Response[HTTPValidationError | ResetPasswordValidateResponse] """ kwargs = _get_kwargs( @@ -147,9 +147,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, token: str, -) -> Optional[Union[HTTPValidationError, ResetPasswordValidateResponse]]: +) -> HTTPValidationError | ResetPasswordValidateResponse | None: """Validate Reset Token Check if a password reset token is valid without consuming it. @@ -162,7 +162,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, ResetPasswordValidateResponse] + HTTPValidationError | ResetPasswordValidateResponse """ return ( diff --git a/robosystems_client/api/auth/verify_email.py b/robosystems_client/api/auth/verify_email.py index e2515f6..456aaba 100644 --- a/robosystems_client/api/auth/verify_email.py +++ b/robosystems_client/api/auth/verify_email.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = AuthResponse.from_dict(response.json()) @@ -56,8 +56,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -68,9 +68,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: EmailVerificationRequest, -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: """Verify Email Verify email address with token from email link. Returns JWT for auto-login. @@ -83,7 +83,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]] + Response[AuthResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -99,9 +99,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: EmailVerificationRequest, -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: """Verify Email Verify email address with token from email link. Returns JWT for auto-login. @@ -114,7 +114,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AuthResponse, ErrorResponse, HTTPValidationError] + AuthResponse | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -125,9 +125,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: EmailVerificationRequest, -) -> Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[AuthResponse | ErrorResponse | HTTPValidationError]: """Verify Email Verify email address with token from email link. Returns JWT for auto-login. @@ -140,7 +140,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[AuthResponse, ErrorResponse, HTTPValidationError]] + Response[AuthResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -154,9 +154,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: EmailVerificationRequest, -) -> Optional[Union[AuthResponse, ErrorResponse, HTTPValidationError]]: +) -> AuthResponse | ErrorResponse | HTTPValidationError | None: """Verify Email Verify email address with token from email link. Returns JWT for auto-login. 
@@ -169,7 +169,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[AuthResponse, ErrorResponse, HTTPValidationError] + AuthResponse | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/backup/create_backup.py b/robosystems_client/api/backup/create_backup.py index b514a26..4511039 100644 --- a/robosystems_client/api/backup/create_backup.py +++ b/robosystems_client/api/backup/create_backup.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | HTTPValidationError | None: if response.status_code == 202: response_202 = response.json() return response_202 @@ -70,8 +70,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -85,7 +85,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: BackupCreateRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Response[Any | ErrorResponse | HTTPValidationError]: """Create Backup Create a backup of the graph database. @@ -158,7 +158,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -178,7 +178,7 @@ def sync( *, client: AuthenticatedClient, body: BackupCreateRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Any | ErrorResponse | HTTPValidationError | None: """Create Backup Create a backup of the graph database. @@ -251,7 +251,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -266,7 +266,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: BackupCreateRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Response[Any | ErrorResponse | HTTPValidationError]: """Create Backup Create a backup of the graph database. @@ -339,7 +339,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -357,7 +357,7 @@ async def asyncio( *, client: AuthenticatedClient, body: BackupCreateRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Any | ErrorResponse | HTTPValidationError | None: """Create Backup Create a backup of the graph database. @@ -430,7 +430,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/backup/get_backup_download_url.py b/robosystems_client/api/backup/get_backup_download_url.py index 9793968..c618995 100644 --- a/robosystems_client/api/backup/get_backup_download_url.py +++ b/robosystems_client/api/backup/get_backup_download_url.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -14,7 +14,7 @@ def _get_kwargs( graph_id: str, backup_id: str, *, - expires_in: Union[Unset, int] = 3600, + expires_in: int | Unset = 3600, ) -> dict[str, Any]: params: dict[str, Any] = {} @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, BackupDownloadUrlResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | BackupDownloadUrlResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = BackupDownloadUrlResponse.from_dict(response.json()) @@ -63,8 +63,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, BackupDownloadUrlResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | BackupDownloadUrlResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -78,8 +78,8 @@ def sync_detailed( backup_id: str, *, client: AuthenticatedClient, - expires_in: Union[Unset, int] = 3600, -) -> Response[Union[Any, BackupDownloadUrlResponse, HTTPValidationError]]: + expires_in: int | Unset = 3600, +) -> Response[Any | BackupDownloadUrlResponse | HTTPValidationError]: """Get temporary download URL for backup Generate a temporary download URL for a backup 
(unencrypted, compressed .kuzu files only) @@ -87,14 +87,14 @@ def sync_detailed( Args: graph_id (str): backup_id (str): Backup identifier - expires_in (Union[Unset, int]): URL expiration time in seconds Default: 3600. + expires_in (int | Unset): URL expiration time in seconds Default: 3600. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, BackupDownloadUrlResponse, HTTPValidationError]] + Response[Any | BackupDownloadUrlResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -115,8 +115,8 @@ def sync( backup_id: str, *, client: AuthenticatedClient, - expires_in: Union[Unset, int] = 3600, -) -> Optional[Union[Any, BackupDownloadUrlResponse, HTTPValidationError]]: + expires_in: int | Unset = 3600, +) -> Any | BackupDownloadUrlResponse | HTTPValidationError | None: """Get temporary download URL for backup Generate a temporary download URL for a backup (unencrypted, compressed .kuzu files only) @@ -124,14 +124,14 @@ def sync( Args: graph_id (str): backup_id (str): Backup identifier - expires_in (Union[Unset, int]): URL expiration time in seconds Default: 3600. + expires_in (int | Unset): URL expiration time in seconds Default: 3600. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, BackupDownloadUrlResponse, HTTPValidationError] + Any | BackupDownloadUrlResponse | HTTPValidationError """ return sync_detailed( @@ -147,8 +147,8 @@ async def asyncio_detailed( backup_id: str, *, client: AuthenticatedClient, - expires_in: Union[Unset, int] = 3600, -) -> Response[Union[Any, BackupDownloadUrlResponse, HTTPValidationError]]: + expires_in: int | Unset = 3600, +) -> Response[Any | BackupDownloadUrlResponse | HTTPValidationError]: """Get temporary download URL for backup Generate a temporary download URL for a backup (unencrypted, compressed .kuzu files only) @@ -156,14 +156,14 @@ async def asyncio_detailed( Args: graph_id (str): backup_id (str): Backup identifier - expires_in (Union[Unset, int]): URL expiration time in seconds Default: 3600. + expires_in (int | Unset): URL expiration time in seconds Default: 3600. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, BackupDownloadUrlResponse, HTTPValidationError]] + Response[Any | BackupDownloadUrlResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -182,8 +182,8 @@ async def asyncio( backup_id: str, *, client: AuthenticatedClient, - expires_in: Union[Unset, int] = 3600, -) -> Optional[Union[Any, BackupDownloadUrlResponse, HTTPValidationError]]: + expires_in: int | Unset = 3600, +) -> Any | BackupDownloadUrlResponse | HTTPValidationError | None: """Get temporary download URL for backup Generate a temporary download URL for a backup (unencrypted, compressed .kuzu files only) @@ -191,14 +191,14 @@ async def asyncio( Args: graph_id (str): backup_id (str): Backup identifier - expires_in (Union[Unset, int]): URL expiration time in seconds Default: 3600. + expires_in (int | Unset): URL expiration time in seconds Default: 3600. 
Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, BackupDownloadUrlResponse, HTTPValidationError] + Any | BackupDownloadUrlResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/backup/get_backup_stats.py b/robosystems_client/api/backup/get_backup_stats.py index 1d527bd..6ac3af5 100644 --- a/robosystems_client/api/backup/get_backup_stats.py +++ b/robosystems_client/api/backup/get_backup_stats.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[BackupStatsResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> BackupStatsResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = BackupStatsResponse.from_dict(response.json()) @@ -41,8 +41,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[BackupStatsResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[BackupStatsResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -55,7 +55,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[BackupStatsResponse, HTTPValidationError]]: +) -> Response[BackupStatsResponse | HTTPValidationError]: """Get backup statistics Get comprehensive backup statistics for the specified graph database @@ -68,7 +68,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[BackupStatsResponse, HTTPValidationError]] + Response[BackupStatsResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -86,7 +86,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[BackupStatsResponse, HTTPValidationError]]: +) -> BackupStatsResponse | HTTPValidationError | None: """Get backup statistics Get comprehensive backup statistics for the specified graph database @@ -99,7 +99,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[BackupStatsResponse, HTTPValidationError] + BackupStatsResponse | HTTPValidationError """ return sync_detailed( @@ -112,7 +112,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[BackupStatsResponse, HTTPValidationError]]: +) -> Response[BackupStatsResponse | HTTPValidationError]: """Get backup statistics Get comprehensive backup statistics for the specified graph database @@ -125,7 +125,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[BackupStatsResponse, HTTPValidationError]] + Response[BackupStatsResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -141,7 +141,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[BackupStatsResponse, HTTPValidationError]]: +) -> BackupStatsResponse | HTTPValidationError | None: """Get backup statistics Get comprehensive backup statistics for the specified graph database @@ -154,7 +154,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[BackupStatsResponse, HTTPValidationError] + BackupStatsResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/backup/list_backups.py b/robosystems_client/api/backup/list_backups.py index ad53cfd..da9ef64 100644 --- a/robosystems_client/api/backup/list_backups.py +++ b/robosystems_client/api/backup/list_backups.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -13,8 +13,8 @@ def _get_kwargs( graph_id: str, *, - limit: Union[Unset, int] = 50, - offset: Union[Unset, int] = 0, + limit: int | Unset = 50, + offset: int | Unset = 0, ) -> dict[str, Any]: params: dict[str, Any] = {} @@ -34,8 +34,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[BackupListResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> BackupListResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = BackupListResponse.from_dict(response.json()) @@ -53,8 +53,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[BackupListResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[BackupListResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -67,24 +67,24 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, - limit: Union[Unset, int] = 50, - offset: Union[Unset, int] = 0, -) -> Response[Union[BackupListResponse, HTTPValidationError]]: + limit: int | Unset = 50, + offset: int | Unset = 0, +) -> Response[BackupListResponse | HTTPValidationError]: """List graph database backups List all backups for the specified graph database Args: graph_id (str): - limit (Union[Unset, int]): Maximum number 
of backups to return Default: 50. - offset (Union[Unset, int]): Number of backups to skip Default: 0. + limit (int | Unset): Maximum number of backups to return Default: 50. + offset (int | Unset): Number of backups to skip Default: 0. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[BackupListResponse, HTTPValidationError]] + Response[BackupListResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -104,24 +104,24 @@ def sync( graph_id: str, *, client: AuthenticatedClient, - limit: Union[Unset, int] = 50, - offset: Union[Unset, int] = 0, -) -> Optional[Union[BackupListResponse, HTTPValidationError]]: + limit: int | Unset = 50, + offset: int | Unset = 0, +) -> BackupListResponse | HTTPValidationError | None: """List graph database backups List all backups for the specified graph database Args: graph_id (str): - limit (Union[Unset, int]): Maximum number of backups to return Default: 50. - offset (Union[Unset, int]): Number of backups to skip Default: 0. + limit (int | Unset): Maximum number of backups to return Default: 50. + offset (int | Unset): Number of backups to skip Default: 0. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[BackupListResponse, HTTPValidationError] + BackupListResponse | HTTPValidationError """ return sync_detailed( @@ -136,24 +136,24 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, - limit: Union[Unset, int] = 50, - offset: Union[Unset, int] = 0, -) -> Response[Union[BackupListResponse, HTTPValidationError]]: + limit: int | Unset = 50, + offset: int | Unset = 0, +) -> Response[BackupListResponse | HTTPValidationError]: """List graph database backups List all backups for the specified graph database Args: graph_id (str): - limit (Union[Unset, int]): Maximum number of backups to return Default: 50. - offset (Union[Unset, int]): Number of backups to skip Default: 0. + limit (int | Unset): Maximum number of backups to return Default: 50. + offset (int | Unset): Number of backups to skip Default: 0. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[BackupListResponse, HTTPValidationError]] + Response[BackupListResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -171,24 +171,24 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, - limit: Union[Unset, int] = 50, - offset: Union[Unset, int] = 0, -) -> Optional[Union[BackupListResponse, HTTPValidationError]]: + limit: int | Unset = 50, + offset: int | Unset = 0, +) -> BackupListResponse | HTTPValidationError | None: """List graph database backups List all backups for the specified graph database Args: graph_id (str): - limit (Union[Unset, int]): Maximum number of backups to return Default: 50. - offset (Union[Unset, int]): Number of backups to skip Default: 0. + limit (int | Unset): Maximum number of backups to return Default: 50. + offset (int | Unset): Number of backups to skip Default: 0. 
Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[BackupListResponse, HTTPValidationError] + BackupListResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/backup/restore_backup.py b/robosystems_client/api/backup/restore_backup.py index 872c3bc..deee42c 100644 --- a/robosystems_client/api/backup/restore_backup.py +++ b/robosystems_client/api/backup/restore_backup.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -33,8 +33,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | HTTPValidationError | None: if response.status_code == 202: response_202 = response.json() return response_202 @@ -71,8 +71,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -87,7 +87,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: BackupRestoreRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Response[Any | ErrorResponse | HTTPValidationError]: """Restore Encrypted Backup Restore a graph database from an encrypted backup. @@ -166,7 +166,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -188,7 +188,7 @@ def sync( *, client: AuthenticatedClient, body: BackupRestoreRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Any | ErrorResponse | HTTPValidationError | None: """Restore Encrypted Backup Restore a graph database from an encrypted backup. @@ -267,7 +267,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -284,7 +284,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: BackupRestoreRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Response[Any | ErrorResponse | HTTPValidationError]: """Restore Encrypted Backup Restore a graph database from an encrypted backup. @@ -363,7 +363,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -383,7 +383,7 @@ async def asyncio( *, client: AuthenticatedClient, body: BackupRestoreRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Any | ErrorResponse | HTTPValidationError | None: """Restore Encrypted Backup Restore a graph database from an encrypted backup. @@ -462,7 +462,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/billing/cancel_org_subscription.py b/robosystems_client/api/billing/cancel_org_subscription.py index a0b0198..0fe000b 100644 --- a/robosystems_client/api/billing/cancel_org_subscription.py +++ b/robosystems_client/api/billing/cancel_org_subscription.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[GraphSubscriptionResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> GraphSubscriptionResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = GraphSubscriptionResponse.from_dict(response.json()) @@ -42,8 +42,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[GraphSubscriptionResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[GraphSubscriptionResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -57,7 +57,7 @@ def sync_detailed( subscription_id: str, *, client: AuthenticatedClient, -) -> Response[Union[GraphSubscriptionResponse, HTTPValidationError]]: +) -> Response[GraphSubscriptionResponse | HTTPValidationError]: """Cancel Organization Subscription Cancel an organization subscription. @@ -76,7 +76,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[GraphSubscriptionResponse, HTTPValidationError]] + Response[GraphSubscriptionResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -96,7 +96,7 @@ def sync( subscription_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[GraphSubscriptionResponse, HTTPValidationError]]: +) -> GraphSubscriptionResponse | HTTPValidationError | None: """Cancel Organization Subscription Cancel an organization subscription. @@ -115,7 +115,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[GraphSubscriptionResponse, HTTPValidationError] + GraphSubscriptionResponse | HTTPValidationError """ return sync_detailed( @@ -130,7 +130,7 @@ async def asyncio_detailed( subscription_id: str, *, client: AuthenticatedClient, -) -> Response[Union[GraphSubscriptionResponse, HTTPValidationError]]: +) -> Response[GraphSubscriptionResponse | HTTPValidationError]: """Cancel Organization Subscription Cancel an organization subscription. @@ -149,7 +149,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[GraphSubscriptionResponse, HTTPValidationError]] + Response[GraphSubscriptionResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -167,7 +167,7 @@ async def asyncio( subscription_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[GraphSubscriptionResponse, HTTPValidationError]]: +) -> GraphSubscriptionResponse | HTTPValidationError | None: """Cancel Organization Subscription Cancel an organization subscription. @@ -186,7 +186,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[GraphSubscriptionResponse, HTTPValidationError] + GraphSubscriptionResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/billing/create_checkout_session.py b/robosystems_client/api/billing/create_checkout_session.py index 6df4073..e637d18 100644 --- a/robosystems_client/api/billing/create_checkout_session.py +++ b/robosystems_client/api/billing/create_checkout_session.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -31,8 +31,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[CheckoutResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> CheckoutResponse | HTTPValidationError | None: if response.status_code == 201: response_201 = CheckoutResponse.from_dict(response.json()) @@ -50,8 +50,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[CheckoutResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[CheckoutResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -64,7 +64,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: CreateCheckoutRequest, -) -> Response[Union[CheckoutResponse, HTTPValidationError]]: +) -> Response[CheckoutResponse | HTTPValidationError]: """Create Payment Checkout Session Create a Stripe checkout session for collecting payment method. @@ -93,7 +93,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[CheckoutResponse, HTTPValidationError]] + Response[CheckoutResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -111,7 +111,7 @@ def sync( *, client: AuthenticatedClient, body: CreateCheckoutRequest, -) -> Optional[Union[CheckoutResponse, HTTPValidationError]]: +) -> CheckoutResponse | HTTPValidationError | None: """Create Payment Checkout Session Create a Stripe checkout session for collecting payment method. @@ -140,7 +140,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[CheckoutResponse, HTTPValidationError] + CheckoutResponse | HTTPValidationError """ return sync_detailed( @@ -153,7 +153,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: CreateCheckoutRequest, -) -> Response[Union[CheckoutResponse, HTTPValidationError]]: +) -> Response[CheckoutResponse | HTTPValidationError]: """Create Payment Checkout Session Create a Stripe checkout session for collecting payment method. @@ -182,7 +182,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[CheckoutResponse, HTTPValidationError]] + Response[CheckoutResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -198,7 +198,7 @@ async def asyncio( *, client: AuthenticatedClient, body: CreateCheckoutRequest, -) -> Optional[Union[CheckoutResponse, HTTPValidationError]]: +) -> CheckoutResponse | HTTPValidationError | None: """Create Payment Checkout Session Create a Stripe checkout session for collecting payment method. @@ -227,7 +227,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[CheckoutResponse, HTTPValidationError] + CheckoutResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/billing/create_portal_session.py b/robosystems_client/api/billing/create_portal_session.py index 9656fac..df6da84 100644 --- a/robosystems_client/api/billing/create_portal_session.py +++ b/robosystems_client/api/billing/create_portal_session.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, PortalSessionResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | PortalSessionResponse | None: if response.status_code == 200: response_200 = PortalSessionResponse.from_dict(response.json()) @@ -41,8 +41,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, PortalSessionResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | PortalSessionResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -55,7 +55,7 @@ def sync_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, PortalSessionResponse]]: +) -> Response[HTTPValidationError | PortalSessionResponse]: """Create Customer Portal Session Create a Stripe Customer Portal session for managing payment methods. @@ -81,7 +81,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, PortalSessionResponse]] + Response[HTTPValidationError | PortalSessionResponse] """ kwargs = _get_kwargs( @@ -99,7 +99,7 @@ def sync( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, PortalSessionResponse]]: +) -> HTTPValidationError | PortalSessionResponse | None: """Create Customer Portal Session Create a Stripe Customer Portal session for managing payment methods. @@ -125,7 +125,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, PortalSessionResponse] + HTTPValidationError | PortalSessionResponse """ return sync_detailed( @@ -138,7 +138,7 @@ async def asyncio_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, PortalSessionResponse]]: +) -> Response[HTTPValidationError | PortalSessionResponse]: """Create Customer Portal Session Create a Stripe Customer Portal session for managing payment methods. @@ -164,7 +164,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, PortalSessionResponse]] + Response[HTTPValidationError | PortalSessionResponse] """ kwargs = _get_kwargs( @@ -180,7 +180,7 @@ async def asyncio( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, PortalSessionResponse]]: +) -> HTTPValidationError | PortalSessionResponse | None: """Create Customer Portal Session Create a Stripe Customer Portal session for managing payment methods. @@ -206,7 +206,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, PortalSessionResponse] + HTTPValidationError | PortalSessionResponse """ return ( diff --git a/robosystems_client/api/billing/get_checkout_status.py b/robosystems_client/api/billing/get_checkout_status.py index fb40499..bed3859 100644 --- a/robosystems_client/api/billing/get_checkout_status.py +++ b/robosystems_client/api/billing/get_checkout_status.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[CheckoutStatusResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> CheckoutStatusResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = CheckoutStatusResponse.from_dict(response.json()) @@ -41,8 +41,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[CheckoutStatusResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[CheckoutStatusResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -55,7 +55,7 @@ def sync_detailed( session_id: str, *, client: AuthenticatedClient, -) -> Response[Union[CheckoutStatusResponse, HTTPValidationError]]: +) -> Response[CheckoutStatusResponse | HTTPValidationError]: """Get Checkout Session Status Poll the status of a checkout session. @@ -82,7 +82,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[CheckoutStatusResponse, HTTPValidationError]] + Response[CheckoutStatusResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -100,7 +100,7 @@ def sync( session_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[CheckoutStatusResponse, HTTPValidationError]]: +) -> CheckoutStatusResponse | HTTPValidationError | None: """Get Checkout Session Status Poll the status of a checkout session. @@ -127,7 +127,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[CheckoutStatusResponse, HTTPValidationError] + CheckoutStatusResponse | HTTPValidationError """ return sync_detailed( @@ -140,7 +140,7 @@ async def asyncio_detailed( session_id: str, *, client: AuthenticatedClient, -) -> Response[Union[CheckoutStatusResponse, HTTPValidationError]]: +) -> Response[CheckoutStatusResponse | HTTPValidationError]: """Get Checkout Session Status Poll the status of a checkout session. @@ -167,7 +167,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[CheckoutStatusResponse, HTTPValidationError]] + Response[CheckoutStatusResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -183,7 +183,7 @@ async def asyncio( session_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[CheckoutStatusResponse, HTTPValidationError]]: +) -> CheckoutStatusResponse | HTTPValidationError | None: """Get Checkout Session Status Poll the status of a checkout session. @@ -210,7 +210,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[CheckoutStatusResponse, HTTPValidationError] + CheckoutStatusResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/billing/get_org_billing_customer.py b/robosystems_client/api/billing/get_org_billing_customer.py index e59d41b..7b80001 100644 --- a/robosystems_client/api/billing/get_org_billing_customer.py +++ b/robosystems_client/api/billing/get_org_billing_customer.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[BillingCustomer, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> BillingCustomer | HTTPValidationError | None: if response.status_code == 200: response_200 = BillingCustomer.from_dict(response.json()) @@ -41,8 +41,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[BillingCustomer, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[BillingCustomer | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -55,7 +55,7 @@ def sync_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[BillingCustomer, HTTPValidationError]]: +) -> Response[BillingCustomer | HTTPValidationError]: """Get Organization Customer Info Get billing customer information for an organization including payment methods on file. @@ -74,7 +74,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[BillingCustomer, HTTPValidationError]] + Response[BillingCustomer | HTTPValidationError] """ kwargs = _get_kwargs( @@ -92,7 +92,7 @@ def sync( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[BillingCustomer, HTTPValidationError]]: +) -> BillingCustomer | HTTPValidationError | None: """Get Organization Customer Info Get billing customer information for an organization including payment methods on file. @@ -111,7 +111,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[BillingCustomer, HTTPValidationError] + BillingCustomer | HTTPValidationError """ return sync_detailed( @@ -124,7 +124,7 @@ async def asyncio_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[BillingCustomer, HTTPValidationError]]: +) -> Response[BillingCustomer | HTTPValidationError]: """Get Organization Customer Info Get billing customer information for an organization including payment methods on file. @@ -143,7 +143,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[BillingCustomer, HTTPValidationError]] + Response[BillingCustomer | HTTPValidationError] """ kwargs = _get_kwargs( @@ -159,7 +159,7 @@ async def asyncio( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[BillingCustomer, HTTPValidationError]]: +) -> BillingCustomer | HTTPValidationError | None: """Get Organization Customer Info Get billing customer information for an organization including payment methods on file. @@ -178,7 +178,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[BillingCustomer, HTTPValidationError] + BillingCustomer | HTTPValidationError """ return ( diff --git a/robosystems_client/api/billing/get_org_subscription.py b/robosystems_client/api/billing/get_org_subscription.py index 331ddeb..b6e7d72 100644 --- a/robosystems_client/api/billing/get_org_subscription.py +++ b/robosystems_client/api/billing/get_org_subscription.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[GraphSubscriptionResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> GraphSubscriptionResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = GraphSubscriptionResponse.from_dict(response.json()) @@ -42,8 +42,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[GraphSubscriptionResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[GraphSubscriptionResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -57,7 +57,7 @@ def sync_detailed( subscription_id: str, *, client: AuthenticatedClient, -) -> Response[Union[GraphSubscriptionResponse, HTTPValidationError]]: +) -> Response[GraphSubscriptionResponse | HTTPValidationError]: """Get Organization Subscription Details Get detailed information about a specific subscription. @@ -74,7 +74,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[GraphSubscriptionResponse, HTTPValidationError]] + Response[GraphSubscriptionResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -94,7 +94,7 @@ def sync( subscription_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[GraphSubscriptionResponse, HTTPValidationError]]: +) -> GraphSubscriptionResponse | HTTPValidationError | None: """Get Organization Subscription Details Get detailed information about a specific subscription. @@ -111,7 +111,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[GraphSubscriptionResponse, HTTPValidationError] + GraphSubscriptionResponse | HTTPValidationError """ return sync_detailed( @@ -126,7 +126,7 @@ async def asyncio_detailed( subscription_id: str, *, client: AuthenticatedClient, -) -> Response[Union[GraphSubscriptionResponse, HTTPValidationError]]: +) -> Response[GraphSubscriptionResponse | HTTPValidationError]: """Get Organization Subscription Details Get detailed information about a specific subscription. @@ -143,7 +143,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[GraphSubscriptionResponse, HTTPValidationError]] + Response[GraphSubscriptionResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -161,7 +161,7 @@ async def asyncio( subscription_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[GraphSubscriptionResponse, HTTPValidationError]]: +) -> GraphSubscriptionResponse | HTTPValidationError | None: """Get Organization Subscription Details Get detailed information about a specific subscription. @@ -178,7 +178,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[GraphSubscriptionResponse, HTTPValidationError] + GraphSubscriptionResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/billing/get_org_upcoming_invoice.py b/robosystems_client/api/billing/get_org_upcoming_invoice.py index de62e5c..8717d41 100644 --- a/robosystems_client/api/billing/get_org_upcoming_invoice.py +++ b/robosystems_client/api/billing/get_org_upcoming_invoice.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -22,11 +22,11 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, Union["UpcomingInvoice", None]]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | None | UpcomingInvoice | None: if response.status_code == 200: - def _parse_response_200(data: object) -> Union["UpcomingInvoice", None]: + def _parse_response_200(data: object) -> None | UpcomingInvoice: if data is None: return data try: @@ -35,9 +35,9 @@ def _parse_response_200(data: object) -> Union["UpcomingInvoice", None]: response_200_type_0 = UpcomingInvoice.from_dict(data) return response_200_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["UpcomingInvoice", None], data) + return cast(None | UpcomingInvoice, data) response_200 = _parse_response_200(response.json()) @@ -55,8 +55,8 @@ def _parse_response_200(data: object) -> Union["UpcomingInvoice", None]: def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, Union["UpcomingInvoice", None]]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | None | UpcomingInvoice]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -69,7 
+69,7 @@ def sync_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, Union["UpcomingInvoice", None]]]: +) -> Response[HTTPValidationError | None | UpcomingInvoice]: """Get Organization Upcoming Invoice Get preview of the next invoice for an organization. @@ -88,7 +88,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, Union['UpcomingInvoice', None]]] + Response[HTTPValidationError | None | UpcomingInvoice] """ kwargs = _get_kwargs( @@ -106,7 +106,7 @@ def sync( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, Union["UpcomingInvoice", None]]]: +) -> HTTPValidationError | None | UpcomingInvoice | None: """Get Organization Upcoming Invoice Get preview of the next invoice for an organization. @@ -125,7 +125,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, Union['UpcomingInvoice', None]] + HTTPValidationError | None | UpcomingInvoice """ return sync_detailed( @@ -138,7 +138,7 @@ async def asyncio_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, Union["UpcomingInvoice", None]]]: +) -> Response[HTTPValidationError | None | UpcomingInvoice]: """Get Organization Upcoming Invoice Get preview of the next invoice for an organization. @@ -157,7 +157,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, Union['UpcomingInvoice', None]]] + Response[HTTPValidationError | None | UpcomingInvoice] """ kwargs = _get_kwargs( @@ -173,7 +173,7 @@ async def asyncio( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, Union["UpcomingInvoice", None]]]: +) -> HTTPValidationError | None | UpcomingInvoice | None: """Get Organization Upcoming Invoice Get preview of the next invoice for an organization. @@ -192,7 +192,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, Union['UpcomingInvoice', None]] + HTTPValidationError | None | UpcomingInvoice """ return ( diff --git a/robosystems_client/api/billing/list_org_invoices.py b/robosystems_client/api/billing/list_org_invoices.py index 47c0934..ab58615 100644 --- a/robosystems_client/api/billing/list_org_invoices.py +++ b/robosystems_client/api/billing/list_org_invoices.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -13,7 +13,7 @@ def _get_kwargs( org_id: str, *, - limit: Union[Unset, int] = 10, + limit: int | Unset = 10, ) -> dict[str, Any]: params: dict[str, Any] = {} @@ -31,8 +31,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, InvoicesResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | InvoicesResponse | None: if response.status_code == 200: response_200 = InvoicesResponse.from_dict(response.json()) @@ -50,8 +50,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, InvoicesResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | InvoicesResponse]: return 
Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -64,8 +64,8 @@ def sync_detailed( org_id: str, *, client: AuthenticatedClient, - limit: Union[Unset, int] = 10, -) -> Response[Union[HTTPValidationError, InvoicesResponse]]: + limit: int | Unset = 10, +) -> Response[HTTPValidationError | InvoicesResponse]: """List Organization Invoices List payment history and invoices for an organization. @@ -78,14 +78,14 @@ def sync_detailed( Args: org_id (str): - limit (Union[Unset, int]): Number of invoices to return Default: 10. + limit (int | Unset): Number of invoices to return Default: 10. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, InvoicesResponse]] + Response[HTTPValidationError | InvoicesResponse] """ kwargs = _get_kwargs( @@ -104,8 +104,8 @@ def sync( org_id: str, *, client: AuthenticatedClient, - limit: Union[Unset, int] = 10, -) -> Optional[Union[HTTPValidationError, InvoicesResponse]]: + limit: int | Unset = 10, +) -> HTTPValidationError | InvoicesResponse | None: """List Organization Invoices List payment history and invoices for an organization. @@ -118,14 +118,14 @@ def sync( Args: org_id (str): - limit (Union[Unset, int]): Number of invoices to return Default: 10. + limit (int | Unset): Number of invoices to return Default: 10. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, InvoicesResponse] + HTTPValidationError | InvoicesResponse """ return sync_detailed( @@ -139,8 +139,8 @@ async def asyncio_detailed( org_id: str, *, client: AuthenticatedClient, - limit: Union[Unset, int] = 10, -) -> Response[Union[HTTPValidationError, InvoicesResponse]]: + limit: int | Unset = 10, +) -> Response[HTTPValidationError | InvoicesResponse]: """List Organization Invoices List payment history and invoices for an organization. @@ -153,14 +153,14 @@ async def asyncio_detailed( Args: org_id (str): - limit (Union[Unset, int]): Number of invoices to return Default: 10. + limit (int | Unset): Number of invoices to return Default: 10. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, InvoicesResponse]] + Response[HTTPValidationError | InvoicesResponse] """ kwargs = _get_kwargs( @@ -177,8 +177,8 @@ async def asyncio( org_id: str, *, client: AuthenticatedClient, - limit: Union[Unset, int] = 10, -) -> Optional[Union[HTTPValidationError, InvoicesResponse]]: + limit: int | Unset = 10, +) -> HTTPValidationError | InvoicesResponse | None: """List Organization Invoices List payment history and invoices for an organization. @@ -191,14 +191,14 @@ async def asyncio( Args: org_id (str): - limit (Union[Unset, int]): Number of invoices to return Default: 10. + limit (int | Unset): Number of invoices to return Default: 10. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, InvoicesResponse] + HTTPValidationError | InvoicesResponse """ return ( diff --git a/robosystems_client/api/billing/list_org_subscriptions.py b/robosystems_client/api/billing/list_org_subscriptions.py index 750229d..42beec3 100644 --- a/robosystems_client/api/billing/list_org_subscriptions.py +++ b/robosystems_client/api/billing/list_org_subscriptions.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, list["GraphSubscriptionResponse"]]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | list[GraphSubscriptionResponse] | None: if response.status_code == 200: response_200 = [] _response_200 = response.json() @@ -46,8 +46,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, list["GraphSubscriptionResponse"]]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | list[GraphSubscriptionResponse]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -60,7 +60,7 @@ def sync_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, list["GraphSubscriptionResponse"]]]: +) -> Response[HTTPValidationError | list[GraphSubscriptionResponse]]: """List Organization Subscriptions List all active and past subscriptions for an organization. @@ -79,7 +79,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, list['GraphSubscriptionResponse']]] + Response[HTTPValidationError | list[GraphSubscriptionResponse]] """ kwargs = _get_kwargs( @@ -97,7 +97,7 @@ def sync( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, list["GraphSubscriptionResponse"]]]: +) -> HTTPValidationError | list[GraphSubscriptionResponse] | None: """List Organization Subscriptions List all active and past subscriptions for an organization. @@ -116,7 +116,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, list['GraphSubscriptionResponse']] + HTTPValidationError | list[GraphSubscriptionResponse] """ return sync_detailed( @@ -129,7 +129,7 @@ async def asyncio_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, list["GraphSubscriptionResponse"]]]: +) -> Response[HTTPValidationError | list[GraphSubscriptionResponse]]: """List Organization Subscriptions List all active and past subscriptions for an organization. @@ -148,7 +148,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, list['GraphSubscriptionResponse']]] + Response[HTTPValidationError | list[GraphSubscriptionResponse]] """ kwargs = _get_kwargs( @@ -164,7 +164,7 @@ async def asyncio( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, list["GraphSubscriptionResponse"]]]: +) -> HTTPValidationError | list[GraphSubscriptionResponse] | None: """List Organization Subscriptions List all active and past subscriptions for an organization. @@ -183,7 +183,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, list['GraphSubscriptionResponse']] + HTTPValidationError | list[GraphSubscriptionResponse] """ return ( diff --git a/robosystems_client/api/connections/create_connection.py b/robosystems_client/api/connections/create_connection.py index 64b4709..5582a31 100644 --- a/robosystems_client/api/connections/create_connection.py +++ b/robosystems_client/api/connections/create_connection.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -33,8 +33,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ConnectionResponse | ErrorResponse | HTTPValidationError | None: if response.status_code == 201: response_201 = ConnectionResponse.from_dict(response.json()) @@ -72,8 +72,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ConnectionResponse | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -87,7 +87,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: CreateConnectionRequest, -) -> Response[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[ConnectionResponse | ErrorResponse | HTTPValidationError]: """Create Connection Create a new data connection for external system integration. @@ -121,7 +121,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]] + Response[ConnectionResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -141,7 +141,7 @@ def sync( *, client: AuthenticatedClient, body: CreateConnectionRequest, -) -> Optional[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: +) -> ConnectionResponse | ErrorResponse | HTTPValidationError | None: """Create Connection Create a new data connection for external system integration. @@ -175,7 +175,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ConnectionResponse, ErrorResponse, HTTPValidationError] + ConnectionResponse | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -190,7 +190,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: CreateConnectionRequest, -) -> Response[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[ConnectionResponse | ErrorResponse | HTTPValidationError]: """Create Connection Create a new data connection for external system integration. @@ -224,7 +224,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]] + Response[ConnectionResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -242,7 +242,7 @@ async def asyncio( *, client: AuthenticatedClient, body: CreateConnectionRequest, -) -> Optional[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: +) -> ConnectionResponse | ErrorResponse | HTTPValidationError | None: """Create Connection Create a new data connection for external system integration. @@ -276,7 +276,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ConnectionResponse, ErrorResponse, HTTPValidationError] + ConnectionResponse | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/connections/create_link_token.py b/robosystems_client/api/connections/create_link_token.py index c3936ae..4ac5319 100644 --- a/robosystems_client/api/connections/create_link_token.py +++ b/robosystems_client/api/connections/create_link_token.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = response.json() return response_200 @@ -65,8 +65,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -80,7 +80,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: LinkTokenRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Response[Any | ErrorResponse | HTTPValidationError]: """Create Link Token Create a link token for embedded authentication providers. @@ -107,7 +107,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -127,7 +127,7 @@ def sync( *, client: AuthenticatedClient, body: LinkTokenRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Any | ErrorResponse | HTTPValidationError | None: """Create Link Token Create a link token for embedded authentication providers. @@ -154,7 +154,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -169,7 +169,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: LinkTokenRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Response[Any | ErrorResponse | HTTPValidationError]: """Create Link Token Create a link token for embedded authentication providers. @@ -196,7 +196,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -214,7 +214,7 @@ async def asyncio( *, client: AuthenticatedClient, body: LinkTokenRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Any | ErrorResponse | HTTPValidationError | None: """Create Link Token Create a link token for embedded authentication providers. @@ -241,7 +241,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/connections/delete_connection.py b/robosystems_client/api/connections/delete_connection.py index 9209083..9aba508 100644 --- a/robosystems_client/api/connections/delete_connection.py +++ b/robosystems_client/api/connections/delete_connection.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -24,8 +24,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: if response.status_code == 200: response_200 = SuccessResponse.from_dict(response.json()) @@ -58,8 +58,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -73,7 +73,7 @@ def sync_detailed( connection_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: """Delete Connection Delete a data connection and clean up related resources. @@ -98,7 +98,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]] + Response[ErrorResponse | HTTPValidationError | SuccessResponse] """ kwargs = _get_kwargs( @@ -118,7 +118,7 @@ def sync( connection_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: """Delete Connection Delete a data connection and clean up related resources. @@ -143,7 +143,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, HTTPValidationError, SuccessResponse] + ErrorResponse | HTTPValidationError | SuccessResponse """ return sync_detailed( @@ -158,7 +158,7 @@ async def asyncio_detailed( connection_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: """Delete Connection Delete a data connection and clean up related resources. @@ -183,7 +183,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]] + Response[ErrorResponse | HTTPValidationError | SuccessResponse] """ kwargs = _get_kwargs( @@ -201,7 +201,7 @@ async def asyncio( connection_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: """Delete Connection Delete a data connection and clean up related resources. @@ -226,7 +226,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, HTTPValidationError, SuccessResponse] + ErrorResponse | HTTPValidationError | SuccessResponse """ return ( diff --git a/robosystems_client/api/connections/exchange_link_token.py b/robosystems_client/api/connections/exchange_link_token.py index df3f0a9..8a7b5ac 100644 --- a/robosystems_client/api/connections/exchange_link_token.py +++ b/robosystems_client/api/connections/exchange_link_token.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = response.json() return response_200 @@ -65,8 +65,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -80,7 +80,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: ExchangeTokenRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Response[Any | ErrorResponse | HTTPValidationError]: """Exchange Link Token Exchange a public token for permanent access credentials. @@ -112,7 +112,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -132,7 +132,7 @@ def sync( *, client: AuthenticatedClient, body: ExchangeTokenRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Any | ErrorResponse | HTTPValidationError | None: """Exchange Link Token Exchange a public token for permanent access credentials. @@ -164,7 +164,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -179,7 +179,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: ExchangeTokenRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Response[Any | ErrorResponse | HTTPValidationError]: """Exchange Link Token Exchange a public token for permanent access credentials. @@ -211,7 +211,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -229,7 +229,7 @@ async def asyncio( *, client: AuthenticatedClient, body: ExchangeTokenRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Any | ErrorResponse | HTTPValidationError | None: """Exchange Link Token Exchange a public token for permanent access credentials. @@ -261,7 +261,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/connections/get_connection.py b/robosystems_client/api/connections/get_connection.py index fb5dd1d..388d96f 100644 --- a/robosystems_client/api/connections/get_connection.py +++ b/robosystems_client/api/connections/get_connection.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -24,8 +24,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ConnectionResponse | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = ConnectionResponse.from_dict(response.json()) @@ -58,8 +58,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ConnectionResponse | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -73,7 +73,7 @@ def sync_detailed( connection_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[ConnectionResponse | ErrorResponse | HTTPValidationError]: """Get Connection Get detailed information about a specific connection. @@ -96,7 +96,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]] + Response[ConnectionResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -116,7 +116,7 @@ def sync( connection_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: +) -> ConnectionResponse | ErrorResponse | HTTPValidationError | None: """Get Connection Get detailed information about a specific connection. @@ -139,7 +139,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ConnectionResponse, ErrorResponse, HTTPValidationError] + ConnectionResponse | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -154,7 +154,7 @@ async def asyncio_detailed( connection_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[ConnectionResponse | ErrorResponse | HTTPValidationError]: """Get Connection Get detailed information about a specific connection. @@ -177,7 +177,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]] + Response[ConnectionResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -195,7 +195,7 @@ async def asyncio( connection_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ConnectionResponse, ErrorResponse, HTTPValidationError]]: +) -> ConnectionResponse | ErrorResponse | HTTPValidationError | None: """Get Connection Get detailed information about a specific connection. @@ -218,7 +218,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ConnectionResponse, ErrorResponse, HTTPValidationError] + ConnectionResponse | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/connections/get_connection_options.py b/robosystems_client/api/connections/get_connection_options.py index a92a8fe..a065b4f 100644 --- a/robosystems_client/api/connections/get_connection_options.py +++ b/robosystems_client/api/connections/get_connection_options.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ConnectionOptionsResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ConnectionOptionsResponse | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = ConnectionOptionsResponse.from_dict(response.json()) @@ -52,8 +52,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ConnectionOptionsResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ConnectionOptionsResponse | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -66,7 +66,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ConnectionOptionsResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[ConnectionOptionsResponse | ErrorResponse | HTTPValidationError]: """List Connection Options Get metadata about all available data connection providers. @@ -98,7 +98,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ConnectionOptionsResponse, ErrorResponse, HTTPValidationError]] + Response[ConnectionOptionsResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -116,7 +116,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ConnectionOptionsResponse, ErrorResponse, HTTPValidationError]]: +) -> ConnectionOptionsResponse | ErrorResponse | HTTPValidationError | None: """List Connection Options Get metadata about all available data connection providers. @@ -148,7 +148,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ConnectionOptionsResponse, ErrorResponse, HTTPValidationError] + ConnectionOptionsResponse | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -161,7 +161,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ConnectionOptionsResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[ConnectionOptionsResponse | ErrorResponse | HTTPValidationError]: """List Connection Options Get metadata about all available data connection providers. @@ -193,7 +193,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ConnectionOptionsResponse, ErrorResponse, HTTPValidationError]] + Response[ConnectionOptionsResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -209,7 +209,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ConnectionOptionsResponse, ErrorResponse, HTTPValidationError]]: +) -> ConnectionOptionsResponse | ErrorResponse | HTTPValidationError | None: """List Connection Options Get metadata about all available data connection providers. @@ -241,7 +241,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ConnectionOptionsResponse, ErrorResponse, HTTPValidationError] + ConnectionOptionsResponse | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/connections/init_o_auth.py b/robosystems_client/api/connections/init_o_auth.py index 108f88c..879da23 100644 --- a/robosystems_client/api/connections/init_o_auth.py +++ b/robosystems_client/api/connections/init_o_auth.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, OAuthInitResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | OAuthInitResponse | None: if response.status_code == 200: response_200 = OAuthInitResponse.from_dict(response.json()) @@ -51,8 +51,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, OAuthInitResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | OAuthInitResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -66,7 +66,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: OAuthInitRequest, -) -> Response[Union[HTTPValidationError, OAuthInitResponse]]: +) -> Response[HTTPValidationError | OAuthInitResponse]: """Init Oauth Initialize OAuth flow for a connection. @@ -83,7 +83,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, OAuthInitResponse]] + Response[HTTPValidationError | OAuthInitResponse] """ kwargs = _get_kwargs( @@ -103,7 +103,7 @@ def sync( *, client: AuthenticatedClient, body: OAuthInitRequest, -) -> Optional[Union[HTTPValidationError, OAuthInitResponse]]: +) -> HTTPValidationError | OAuthInitResponse | None: """Init Oauth Initialize OAuth flow for a connection. @@ -120,7 +120,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, OAuthInitResponse] + HTTPValidationError | OAuthInitResponse """ return sync_detailed( @@ -135,7 +135,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: OAuthInitRequest, -) -> Response[Union[HTTPValidationError, OAuthInitResponse]]: +) -> Response[HTTPValidationError | OAuthInitResponse]: """Init Oauth Initialize OAuth flow for a connection. @@ -152,7 +152,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OAuthInitResponse]] + Response[HTTPValidationError | OAuthInitResponse] """ kwargs = _get_kwargs( @@ -170,7 +170,7 @@ async def asyncio( *, client: AuthenticatedClient, body: OAuthInitRequest, -) -> Optional[Union[HTTPValidationError, OAuthInitResponse]]: +) -> HTTPValidationError | OAuthInitResponse | None: """Init Oauth Initialize OAuth flow for a connection. @@ -187,7 +187,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, OAuthInitResponse] + HTTPValidationError | OAuthInitResponse """ return ( diff --git a/robosystems_client/api/connections/list_connections.py b/robosystems_client/api/connections/list_connections.py index e690c9d..b1fabe7 100644 --- a/robosystems_client/api/connections/list_connections.py +++ b/robosystems_client/api/connections/list_connections.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -15,19 +15,19 @@ def _get_kwargs( graph_id: str, *, - entity_id: Union[None, Unset, str] = UNSET, - provider: Union[ListConnectionsProviderType0, None, Unset] = UNSET, + entity_id: None | str | Unset = UNSET, + provider: ListConnectionsProviderType0 | None | Unset = UNSET, ) -> dict[str, Any]: params: dict[str, Any] = {} - json_entity_id: Union[None, Unset, str] + json_entity_id: None | str | Unset if isinstance(entity_id, Unset): json_entity_id = UNSET else: json_entity_id = entity_id params["entity_id"] = json_entity_id - json_provider: Union[None, Unset, str] + json_provider: None | str | Unset if isinstance(provider, Unset): json_provider = UNSET elif isinstance(provider, ListConnectionsProviderType0): @@ -48,8 +48,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, HTTPValidationError, list["ConnectionResponse"]]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | HTTPValidationError | list[ConnectionResponse] | None: if response.status_code == 200: response_200 = [] _response_200 = response.json() @@ -82,8 +82,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, HTTPValidationError, list["ConnectionResponse"]]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | 
HTTPValidationError | list[ConnectionResponse]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -96,9 +96,9 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, - entity_id: Union[None, Unset, str] = UNSET, - provider: Union[ListConnectionsProviderType0, None, Unset] = UNSET, -) -> Response[Union[ErrorResponse, HTTPValidationError, list["ConnectionResponse"]]]: + entity_id: None | str | Unset = UNSET, + provider: ListConnectionsProviderType0 | None | Unset = UNSET, +) -> Response[ErrorResponse | HTTPValidationError | list[ConnectionResponse]]: """List Connections List all data connections in the graph. @@ -118,15 +118,15 @@ def sync_detailed( Args: graph_id (str): - entity_id (Union[None, Unset, str]): Filter by entity ID - provider (Union[ListConnectionsProviderType0, None, Unset]): Filter by provider type + entity_id (None | str | Unset): Filter by entity ID + provider (ListConnectionsProviderType0 | None | Unset): Filter by provider type Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, list['ConnectionResponse']]] + Response[ErrorResponse | HTTPValidationError | list[ConnectionResponse]] """ kwargs = _get_kwargs( @@ -146,9 +146,9 @@ def sync( graph_id: str, *, client: AuthenticatedClient, - entity_id: Union[None, Unset, str] = UNSET, - provider: Union[ListConnectionsProviderType0, None, Unset] = UNSET, -) -> Optional[Union[ErrorResponse, HTTPValidationError, list["ConnectionResponse"]]]: + entity_id: None | str | Unset = UNSET, + provider: ListConnectionsProviderType0 | None | Unset = UNSET, +) -> ErrorResponse | HTTPValidationError | list[ConnectionResponse] | None: """List Connections List all data connections in the graph. 
@@ -168,15 +168,15 @@ def sync( Args: graph_id (str): - entity_id (Union[None, Unset, str]): Filter by entity ID - provider (Union[ListConnectionsProviderType0, None, Unset]): Filter by provider type + entity_id (None | str | Unset): Filter by entity ID + provider (ListConnectionsProviderType0 | None | Unset): Filter by provider type Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, HTTPValidationError, list['ConnectionResponse']] + ErrorResponse | HTTPValidationError | list[ConnectionResponse] """ return sync_detailed( @@ -191,9 +191,9 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, - entity_id: Union[None, Unset, str] = UNSET, - provider: Union[ListConnectionsProviderType0, None, Unset] = UNSET, -) -> Response[Union[ErrorResponse, HTTPValidationError, list["ConnectionResponse"]]]: + entity_id: None | str | Unset = UNSET, + provider: ListConnectionsProviderType0 | None | Unset = UNSET, +) -> Response[ErrorResponse | HTTPValidationError | list[ConnectionResponse]]: """List Connections List all data connections in the graph. @@ -213,15 +213,15 @@ async def asyncio_detailed( Args: graph_id (str): - entity_id (Union[None, Unset, str]): Filter by entity ID - provider (Union[ListConnectionsProviderType0, None, Unset]): Filter by provider type + entity_id (None | str | Unset): Filter by entity ID + provider (ListConnectionsProviderType0 | None | Unset): Filter by provider type Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, HTTPValidationError, list['ConnectionResponse']]] + Response[ErrorResponse | HTTPValidationError | list[ConnectionResponse]] """ kwargs = _get_kwargs( @@ -239,9 +239,9 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, - entity_id: Union[None, Unset, str] = UNSET, - provider: Union[ListConnectionsProviderType0, None, Unset] = UNSET, -) -> Optional[Union[ErrorResponse, HTTPValidationError, list["ConnectionResponse"]]]: + entity_id: None | str | Unset = UNSET, + provider: ListConnectionsProviderType0 | None | Unset = UNSET, +) -> ErrorResponse | HTTPValidationError | list[ConnectionResponse] | None: """List Connections List all data connections in the graph. @@ -261,15 +261,15 @@ async def asyncio( Args: graph_id (str): - entity_id (Union[None, Unset, str]): Filter by entity ID - provider (Union[ListConnectionsProviderType0, None, Unset]): Filter by provider type + entity_id (None | str | Unset): Filter by entity ID + provider (ListConnectionsProviderType0 | None | Unset): Filter by provider type Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, HTTPValidationError, list['ConnectionResponse']] + ErrorResponse | HTTPValidationError | list[ConnectionResponse] """ return ( diff --git a/robosystems_client/api/connections/oauth_callback.py b/robosystems_client/api/connections/oauth_callback.py index e8361e8..949de14 100644 --- a/robosystems_client/api/connections/oauth_callback.py +++ b/robosystems_client/api/connections/oauth_callback.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -33,8 +33,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = response.json() return response_200 @@ -71,8 +71,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -87,7 +87,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: OAuthCallbackRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Response[Any | ErrorResponse | HTTPValidationError]: """OAuth Callback Handle OAuth callback from provider after user authorization. @@ -120,7 +120,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -142,7 +142,7 @@ def sync( *, client: AuthenticatedClient, body: OAuthCallbackRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Any | ErrorResponse | HTTPValidationError | None: """OAuth Callback Handle OAuth callback from provider after user authorization. @@ -175,7 +175,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -192,7 +192,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: OAuthCallbackRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Response[Any | ErrorResponse | HTTPValidationError]: """OAuth Callback Handle OAuth callback from provider after user authorization. @@ -225,7 +225,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -245,7 +245,7 @@ async def asyncio( *, client: AuthenticatedClient, body: OAuthCallbackRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: +) -> Any | ErrorResponse | HTTPValidationError | None: """OAuth Callback Handle OAuth callback from provider after user authorization. @@ -278,7 +278,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/connections/sync_connection.py b/robosystems_client/api/connections/sync_connection.py index 3b800e4..26e53d7 100644 --- a/robosystems_client/api/connections/sync_connection.py +++ b/robosystems_client/api/connections/sync_connection.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -36,10 +36,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[ - Union[ErrorResponse, HTTPValidationError, SyncConnectionResponseSyncconnection] -]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | HTTPValidationError | SyncConnectionResponseSyncconnection | None: if response.status_code == 200: response_200 = SyncConnectionResponseSyncconnection.from_dict(response.json()) @@ -72,9 +70,9 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[ - Union[ErrorResponse, HTTPValidationError, SyncConnectionResponseSyncconnection] + ErrorResponse | HTTPValidationError | SyncConnectionResponseSyncconnection ]: return Response( status_code=HTTPStatus(response.status_code), @@ -91,7 +89,7 @@ def sync_detailed( client: AuthenticatedClient, body: SyncConnectionRequest, ) -> Response[ - Union[ErrorResponse, HTTPValidationError, SyncConnectionResponseSyncconnection] + ErrorResponse | HTTPValidationError | SyncConnectionResponseSyncconnection ]: """Sync Connection @@ -130,7 +128,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, HTTPValidationError, SyncConnectionResponseSyncconnection]] + Response[ErrorResponse | HTTPValidationError | SyncConnectionResponseSyncconnection] """ kwargs = _get_kwargs( @@ -152,9 +150,7 @@ def sync( *, client: AuthenticatedClient, body: SyncConnectionRequest, -) -> Optional[ - Union[ErrorResponse, HTTPValidationError, SyncConnectionResponseSyncconnection] -]: +) -> ErrorResponse | HTTPValidationError | SyncConnectionResponseSyncconnection | None: """Sync Connection Trigger a data synchronization for the connection. @@ -192,7 +188,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, HTTPValidationError, SyncConnectionResponseSyncconnection] + ErrorResponse | HTTPValidationError | SyncConnectionResponseSyncconnection """ return sync_detailed( @@ -210,7 +206,7 @@ async def asyncio_detailed( client: AuthenticatedClient, body: SyncConnectionRequest, ) -> Response[ - Union[ErrorResponse, HTTPValidationError, SyncConnectionResponseSyncconnection] + ErrorResponse | HTTPValidationError | SyncConnectionResponseSyncconnection ]: """Sync Connection @@ -249,7 +245,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, SyncConnectionResponseSyncconnection]] + Response[ErrorResponse | HTTPValidationError | SyncConnectionResponseSyncconnection] """ kwargs = _get_kwargs( @@ -269,9 +265,7 @@ async def asyncio( *, client: AuthenticatedClient, body: SyncConnectionRequest, -) -> Optional[ - Union[ErrorResponse, HTTPValidationError, SyncConnectionResponseSyncconnection] -]: +) -> ErrorResponse | HTTPValidationError | SyncConnectionResponseSyncconnection | None: """Sync Connection Trigger a data synchronization for the connection. @@ -309,7 +303,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, HTTPValidationError, SyncConnectionResponseSyncconnection] + ErrorResponse | HTTPValidationError | SyncConnectionResponseSyncconnection """ return ( diff --git a/robosystems_client/api/credits_/check_credit_balance.py b/robosystems_client/api/credits_/check_credit_balance.py index 4e5c70a..d4eb4fa 100644 --- a/robosystems_client/api/credits_/check_credit_balance.py +++ b/robosystems_client/api/credits_/check_credit_balance.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -17,13 +17,13 @@ def _get_kwargs( graph_id: str, *, operation_type: str, - base_cost: Union[None, Unset, float, str] = UNSET, + base_cost: float | None | str | Unset = UNSET, ) -> dict[str, Any]: params: dict[str, Any] = {} params["operation_type"] = operation_type - json_base_cost: Union[None, Unset, float, str] + json_base_cost: float | None | str | Unset if isinstance(base_cost, Unset): json_base_cost = UNSET else: @@ -42,12 +42,13 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[ - Union[ - CheckCreditBalanceResponseCheckcreditbalance, ErrorResponse, HTTPValidationError - ] -]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ( + CheckCreditBalanceResponseCheckcreditbalance + | ErrorResponse + | HTTPValidationError + | None +): if response.status_code == 200: response_200 = CheckCreditBalanceResponseCheckcreditbalance.from_dict( response.json() @@ -82,11 +83,9 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[ - Union[ - CheckCreditBalanceResponseCheckcreditbalance, ErrorResponse, HTTPValidationError - ] + CheckCreditBalanceResponseCheckcreditbalance | ErrorResponse | HTTPValidationError ]: return Response( 
status_code=HTTPStatus(response.status_code), @@ -101,11 +100,9 @@ def sync_detailed( *, client: AuthenticatedClient, operation_type: str, - base_cost: Union[None, Unset, float, str] = UNSET, + base_cost: float | None | str | Unset = UNSET, ) -> Response[ - Union[ - CheckCreditBalanceResponseCheckcreditbalance, ErrorResponse, HTTPValidationError - ] + CheckCreditBalanceResponseCheckcreditbalance | ErrorResponse | HTTPValidationError ]: """Check Credit Balance @@ -124,15 +121,14 @@ def sync_detailed( Args: graph_id (str): Graph database identifier operation_type (str): Type of operation to check - base_cost (Union[None, Unset, float, str]): Custom base cost (uses default if not - provided) + base_cost (float | None | str | Unset): Custom base cost (uses default if not provided) Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[CheckCreditBalanceResponseCheckcreditbalance, ErrorResponse, HTTPValidationError]] + Response[CheckCreditBalanceResponseCheckcreditbalance | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -153,12 +149,13 @@ def sync( *, client: AuthenticatedClient, operation_type: str, - base_cost: Union[None, Unset, float, str] = UNSET, -) -> Optional[ - Union[ - CheckCreditBalanceResponseCheckcreditbalance, ErrorResponse, HTTPValidationError - ] -]: + base_cost: float | None | str | Unset = UNSET, +) -> ( + CheckCreditBalanceResponseCheckcreditbalance + | ErrorResponse + | HTTPValidationError + | None +): """Check Credit Balance Check if the graph has sufficient credits for a planned operation. 
@@ -176,15 +173,14 @@ def sync( Args: graph_id (str): Graph database identifier operation_type (str): Type of operation to check - base_cost (Union[None, Unset, float, str]): Custom base cost (uses default if not - provided) + base_cost (float | None | str | Unset): Custom base cost (uses default if not provided) Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[CheckCreditBalanceResponseCheckcreditbalance, ErrorResponse, HTTPValidationError] + CheckCreditBalanceResponseCheckcreditbalance | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -200,11 +196,9 @@ async def asyncio_detailed( *, client: AuthenticatedClient, operation_type: str, - base_cost: Union[None, Unset, float, str] = UNSET, + base_cost: float | None | str | Unset = UNSET, ) -> Response[ - Union[ - CheckCreditBalanceResponseCheckcreditbalance, ErrorResponse, HTTPValidationError - ] + CheckCreditBalanceResponseCheckcreditbalance | ErrorResponse | HTTPValidationError ]: """Check Credit Balance @@ -223,15 +217,14 @@ async def asyncio_detailed( Args: graph_id (str): Graph database identifier operation_type (str): Type of operation to check - base_cost (Union[None, Unset, float, str]): Custom base cost (uses default if not - provided) + base_cost (float | None | str | Unset): Custom base cost (uses default if not provided) Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[CheckCreditBalanceResponseCheckcreditbalance, ErrorResponse, HTTPValidationError]] + Response[CheckCreditBalanceResponseCheckcreditbalance | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -250,12 +243,13 @@ async def asyncio( *, client: AuthenticatedClient, operation_type: str, - base_cost: Union[None, Unset, float, str] = UNSET, -) -> Optional[ - Union[ - CheckCreditBalanceResponseCheckcreditbalance, ErrorResponse, HTTPValidationError - ] -]: + base_cost: float | None | str | Unset = UNSET, +) -> ( + CheckCreditBalanceResponseCheckcreditbalance + | ErrorResponse + | HTTPValidationError + | None +): """Check Credit Balance Check if the graph has sufficient credits for a planned operation. @@ -273,15 +267,14 @@ async def asyncio( Args: graph_id (str): Graph database identifier operation_type (str): Type of operation to check - base_cost (Union[None, Unset, float, str]): Custom base cost (uses default if not - provided) + base_cost (float | None | str | Unset): Custom base cost (uses default if not provided) Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[CheckCreditBalanceResponseCheckcreditbalance, ErrorResponse, HTTPValidationError] + CheckCreditBalanceResponseCheckcreditbalance | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/credits_/check_storage_limits.py b/robosystems_client/api/credits_/check_storage_limits.py index 0986333..087d32c 100644 --- a/robosystems_client/api/credits_/check_storage_limits.py +++ b/robosystems_client/api/credits_/check_storage_limits.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, HTTPValidationError, StorageLimitResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | HTTPValidationError | StorageLimitResponse | None: if response.status_code == 200: response_200 = StorageLimitResponse.from_dict(response.json()) @@ -57,8 +57,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, HTTPValidationError, StorageLimitResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | HTTPValidationError | StorageLimitResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -71,7 +71,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, HTTPValidationError, StorageLimitResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | StorageLimitResponse]: """Check Storage Limits Check storage limits and usage for a graph. @@ -93,7 +93,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, HTTPValidationError, StorageLimitResponse]] + Response[ErrorResponse | HTTPValidationError | StorageLimitResponse] """ kwargs = _get_kwargs( @@ -111,7 +111,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, HTTPValidationError, StorageLimitResponse]]: +) -> ErrorResponse | HTTPValidationError | StorageLimitResponse | None: """Check Storage Limits Check storage limits and usage for a graph. @@ -133,7 +133,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, HTTPValidationError, StorageLimitResponse] + ErrorResponse | HTTPValidationError | StorageLimitResponse """ return sync_detailed( @@ -146,7 +146,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, HTTPValidationError, StorageLimitResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | StorageLimitResponse]: """Check Storage Limits Check storage limits and usage for a graph. @@ -168,7 +168,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, StorageLimitResponse]] + Response[ErrorResponse | HTTPValidationError | StorageLimitResponse] """ kwargs = _get_kwargs( @@ -184,7 +184,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, HTTPValidationError, StorageLimitResponse]]: +) -> ErrorResponse | HTTPValidationError | StorageLimitResponse | None: """Check Storage Limits Check storage limits and usage for a graph. @@ -206,7 +206,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, HTTPValidationError, StorageLimitResponse] + ErrorResponse | HTTPValidationError | StorageLimitResponse """ return ( diff --git a/robosystems_client/api/credits_/get_credit_summary.py b/robosystems_client/api/credits_/get_credit_summary.py index 2e6a952..24b5c7d 100644 --- a/robosystems_client/api/credits_/get_credit_summary.py +++ b/robosystems_client/api/credits_/get_credit_summary.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[CreditSummaryResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> CreditSummaryResponse | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = CreditSummaryResponse.from_dict(response.json()) @@ -57,8 +57,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[CreditSummaryResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[CreditSummaryResponse | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -71,7 +71,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[CreditSummaryResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[CreditSummaryResponse | ErrorResponse | HTTPValidationError]: """Get Credit Summary Retrieve comprehensive credit usage summary for the specified graph. @@ -92,7 +92,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[CreditSummaryResponse, ErrorResponse, HTTPValidationError]] + Response[CreditSummaryResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -110,7 +110,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[CreditSummaryResponse, ErrorResponse, HTTPValidationError]]: +) -> CreditSummaryResponse | ErrorResponse | HTTPValidationError | None: """Get Credit Summary Retrieve comprehensive credit usage summary for the specified graph. @@ -131,7 +131,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[CreditSummaryResponse, ErrorResponse, HTTPValidationError] + CreditSummaryResponse | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -144,7 +144,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[CreditSummaryResponse, ErrorResponse, HTTPValidationError]]: +) -> Response[CreditSummaryResponse | ErrorResponse | HTTPValidationError]: """Get Credit Summary Retrieve comprehensive credit usage summary for the specified graph. @@ -165,7 +165,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[CreditSummaryResponse, ErrorResponse, HTTPValidationError]] + Response[CreditSummaryResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -181,7 +181,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[CreditSummaryResponse, ErrorResponse, HTTPValidationError]]: +) -> CreditSummaryResponse | ErrorResponse | HTTPValidationError | None: """Get Credit Summary Retrieve comprehensive credit usage summary for the specified graph. @@ -202,7 +202,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[CreditSummaryResponse, ErrorResponse, HTTPValidationError] + CreditSummaryResponse | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/credits_/get_storage_usage.py b/robosystems_client/api/credits_/get_storage_usage.py index 05e2e8b..26ec3ae 100644 --- a/robosystems_client/api/credits_/get_storage_usage.py +++ b/robosystems_client/api/credits_/get_storage_usage.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -16,7 +16,7 @@ def _get_kwargs( graph_id: str, *, - days: Union[Unset, int] = 30, + days: int | Unset = 30, ) -> dict[str, Any]: params: dict[str, Any] = {} @@ -34,10 +34,10 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[ - Union[ErrorResponse, GetStorageUsageResponseGetstorageusage, HTTPValidationError] -]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ( + ErrorResponse | GetStorageUsageResponseGetstorageusage | HTTPValidationError | None +): if response.status_code == 200: response_200 = GetStorageUsageResponseGetstorageusage.from_dict(response.json()) @@ -65,9 +65,9 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[ - Union[ErrorResponse, GetStorageUsageResponseGetstorageusage, HTTPValidationError] + ErrorResponse | GetStorageUsageResponseGetstorageusage | HTTPValidationError ]: return Response( status_code=HTTPStatus(response.status_code), @@ -81,9 +81,9 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, - days: Union[Unset, int] = 30, + days: int | Unset = 30, ) -> Response[ - Union[ErrorResponse, GetStorageUsageResponseGetstorageusage, HTTPValidationError] + ErrorResponse | GetStorageUsageResponseGetstorageusage | HTTPValidationError ]: """Get 
Storage Usage @@ -100,14 +100,14 @@ def sync_detailed( Args: graph_id (str): Graph database identifier - days (Union[Unset, int]): Number of days of history to return Default: 30. + days (int | Unset): Number of days of history to return Default: 30. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, GetStorageUsageResponseGetstorageusage, HTTPValidationError]] + Response[ErrorResponse | GetStorageUsageResponseGetstorageusage | HTTPValidationError] """ kwargs = _get_kwargs( @@ -126,10 +126,10 @@ def sync( graph_id: str, *, client: AuthenticatedClient, - days: Union[Unset, int] = 30, -) -> Optional[ - Union[ErrorResponse, GetStorageUsageResponseGetstorageusage, HTTPValidationError] -]: + days: int | Unset = 30, +) -> ( + ErrorResponse | GetStorageUsageResponseGetstorageusage | HTTPValidationError | None +): """Get Storage Usage Get storage usage history for a graph. @@ -145,14 +145,14 @@ def sync( Args: graph_id (str): Graph database identifier - days (Union[Unset, int]): Number of days of history to return Default: 30. + days (int | Unset): Number of days of history to return Default: 30. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, GetStorageUsageResponseGetstorageusage, HTTPValidationError] + ErrorResponse | GetStorageUsageResponseGetstorageusage | HTTPValidationError """ return sync_detailed( @@ -166,9 +166,9 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, - days: Union[Unset, int] = 30, + days: int | Unset = 30, ) -> Response[ - Union[ErrorResponse, GetStorageUsageResponseGetstorageusage, HTTPValidationError] + ErrorResponse | GetStorageUsageResponseGetstorageusage | HTTPValidationError ]: """Get Storage Usage @@ -185,14 +185,14 @@ async def asyncio_detailed( Args: graph_id (str): Graph database identifier - days (Union[Unset, int]): Number of days of history to return Default: 30. + days (int | Unset): Number of days of history to return Default: 30. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, GetStorageUsageResponseGetstorageusage, HTTPValidationError]] + Response[ErrorResponse | GetStorageUsageResponseGetstorageusage | HTTPValidationError] """ kwargs = _get_kwargs( @@ -209,10 +209,10 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, - days: Union[Unset, int] = 30, -) -> Optional[ - Union[ErrorResponse, GetStorageUsageResponseGetstorageusage, HTTPValidationError] -]: + days: int | Unset = 30, +) -> ( + ErrorResponse | GetStorageUsageResponseGetstorageusage | HTTPValidationError | None +): """Get Storage Usage Get storage usage history for a graph. @@ -228,14 +228,14 @@ async def asyncio( Args: graph_id (str): Graph database identifier - days (Union[Unset, int]): Number of days of history to return Default: 30. + days (int | Unset): Number of days of history to return Default: 30. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, GetStorageUsageResponseGetstorageusage, HTTPValidationError] + ErrorResponse | GetStorageUsageResponseGetstorageusage | HTTPValidationError """ return ( diff --git a/robosystems_client/api/credits_/list_credit_transactions.py b/robosystems_client/api/credits_/list_credit_transactions.py index e804b6e..ede9588 100644 --- a/robosystems_client/api/credits_/list_credit_transactions.py +++ b/robosystems_client/api/credits_/list_credit_transactions.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -14,37 +14,37 @@ def _get_kwargs( graph_id: str, *, - transaction_type: Union[None, Unset, str] = UNSET, - operation_type: Union[None, Unset, str] = UNSET, - start_date: Union[None, Unset, str] = UNSET, - end_date: Union[None, Unset, str] = UNSET, - limit: Union[Unset, int] = 100, - offset: Union[Unset, int] = 0, + transaction_type: None | str | Unset = UNSET, + operation_type: None | str | Unset = UNSET, + start_date: None | str | Unset = UNSET, + end_date: None | str | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, ) -> dict[str, Any]: params: dict[str, Any] = {} - json_transaction_type: Union[None, Unset, str] + json_transaction_type: None | str | Unset if isinstance(transaction_type, Unset): json_transaction_type = UNSET else: json_transaction_type = transaction_type params["transaction_type"] = json_transaction_type - json_operation_type: Union[None, Unset, str] + json_operation_type: None | str | Unset if isinstance(operation_type, Unset): json_operation_type = UNSET else: json_operation_type = operation_type params["operation_type"] = json_operation_type - json_start_date: Union[None, Unset, str] + json_start_date: None | str | Unset if isinstance(start_date, Unset): json_start_date = UNSET else: json_start_date = start_date params["start_date"] = json_start_date - 
json_end_date: Union[None, Unset, str] + json_end_date: None | str | Unset if isinstance(end_date, Unset): json_end_date = UNSET else: @@ -67,8 +67,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[DetailedTransactionsResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> DetailedTransactionsResponse | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = DetailedTransactionsResponse.from_dict(response.json()) @@ -101,8 +101,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[DetailedTransactionsResponse, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[DetailedTransactionsResponse | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -115,13 +115,13 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, - transaction_type: Union[None, Unset, str] = UNSET, - operation_type: Union[None, Unset, str] = UNSET, - start_date: Union[None, Unset, str] = UNSET, - end_date: Union[None, Unset, str] = UNSET, - limit: Union[Unset, int] = 100, - offset: Union[Unset, int] = 0, -) -> Response[Union[DetailedTransactionsResponse, ErrorResponse, HTTPValidationError]]: + transaction_type: None | str | Unset = UNSET, + operation_type: None | str | Unset = UNSET, + start_date: None | str | Unset = UNSET, + end_date: None | str | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> Response[DetailedTransactionsResponse | ErrorResponse | HTTPValidationError]: """List Credit Transactions Retrieve detailed credit transaction history for the specified graph. 
@@ -142,21 +142,21 @@ def sync_detailed( Args: graph_id (str): Graph database identifier - transaction_type (Union[None, Unset, str]): Filter by transaction type (allocation, + transaction_type (None | str | Unset): Filter by transaction type (allocation, consumption, bonus, refund) - operation_type (Union[None, Unset, str]): Filter by operation type (e.g., entity_lookup, + operation_type (None | str | Unset): Filter by operation type (e.g., entity_lookup, cypher_query) - start_date (Union[None, Unset, str]): Start date for filtering (ISO format: YYYY-MM-DD) - end_date (Union[None, Unset, str]): End date for filtering (ISO format: YYYY-MM-DD) - limit (Union[Unset, int]): Maximum number of transactions to return Default: 100. - offset (Union[Unset, int]): Number of transactions to skip Default: 0. + start_date (None | str | Unset): Start date for filtering (ISO format: YYYY-MM-DD) + end_date (None | str | Unset): End date for filtering (ISO format: YYYY-MM-DD) + limit (int | Unset): Maximum number of transactions to return Default: 100. + offset (int | Unset): Number of transactions to skip Default: 0. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[DetailedTransactionsResponse, ErrorResponse, HTTPValidationError]] + Response[DetailedTransactionsResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -180,13 +180,13 @@ def sync( graph_id: str, *, client: AuthenticatedClient, - transaction_type: Union[None, Unset, str] = UNSET, - operation_type: Union[None, Unset, str] = UNSET, - start_date: Union[None, Unset, str] = UNSET, - end_date: Union[None, Unset, str] = UNSET, - limit: Union[Unset, int] = 100, - offset: Union[Unset, int] = 0, -) -> Optional[Union[DetailedTransactionsResponse, ErrorResponse, HTTPValidationError]]: + transaction_type: None | str | Unset = UNSET, + operation_type: None | str | Unset = UNSET, + start_date: None | str | Unset = UNSET, + end_date: None | str | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> DetailedTransactionsResponse | ErrorResponse | HTTPValidationError | None: """List Credit Transactions Retrieve detailed credit transaction history for the specified graph. @@ -207,21 +207,21 @@ def sync( Args: graph_id (str): Graph database identifier - transaction_type (Union[None, Unset, str]): Filter by transaction type (allocation, + transaction_type (None | str | Unset): Filter by transaction type (allocation, consumption, bonus, refund) - operation_type (Union[None, Unset, str]): Filter by operation type (e.g., entity_lookup, + operation_type (None | str | Unset): Filter by operation type (e.g., entity_lookup, cypher_query) - start_date (Union[None, Unset, str]): Start date for filtering (ISO format: YYYY-MM-DD) - end_date (Union[None, Unset, str]): End date for filtering (ISO format: YYYY-MM-DD) - limit (Union[Unset, int]): Maximum number of transactions to return Default: 100. - offset (Union[Unset, int]): Number of transactions to skip Default: 0. 
+ start_date (None | str | Unset): Start date for filtering (ISO format: YYYY-MM-DD) + end_date (None | str | Unset): End date for filtering (ISO format: YYYY-MM-DD) + limit (int | Unset): Maximum number of transactions to return Default: 100. + offset (int | Unset): Number of transactions to skip Default: 0. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[DetailedTransactionsResponse, ErrorResponse, HTTPValidationError] + DetailedTransactionsResponse | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -240,13 +240,13 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, - transaction_type: Union[None, Unset, str] = UNSET, - operation_type: Union[None, Unset, str] = UNSET, - start_date: Union[None, Unset, str] = UNSET, - end_date: Union[None, Unset, str] = UNSET, - limit: Union[Unset, int] = 100, - offset: Union[Unset, int] = 0, -) -> Response[Union[DetailedTransactionsResponse, ErrorResponse, HTTPValidationError]]: + transaction_type: None | str | Unset = UNSET, + operation_type: None | str | Unset = UNSET, + start_date: None | str | Unset = UNSET, + end_date: None | str | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> Response[DetailedTransactionsResponse | ErrorResponse | HTTPValidationError]: """List Credit Transactions Retrieve detailed credit transaction history for the specified graph. 
@@ -267,21 +267,21 @@ async def asyncio_detailed( Args: graph_id (str): Graph database identifier - transaction_type (Union[None, Unset, str]): Filter by transaction type (allocation, + transaction_type (None | str | Unset): Filter by transaction type (allocation, consumption, bonus, refund) - operation_type (Union[None, Unset, str]): Filter by operation type (e.g., entity_lookup, + operation_type (None | str | Unset): Filter by operation type (e.g., entity_lookup, cypher_query) - start_date (Union[None, Unset, str]): Start date for filtering (ISO format: YYYY-MM-DD) - end_date (Union[None, Unset, str]): End date for filtering (ISO format: YYYY-MM-DD) - limit (Union[Unset, int]): Maximum number of transactions to return Default: 100. - offset (Union[Unset, int]): Number of transactions to skip Default: 0. + start_date (None | str | Unset): Start date for filtering (ISO format: YYYY-MM-DD) + end_date (None | str | Unset): End date for filtering (ISO format: YYYY-MM-DD) + limit (int | Unset): Maximum number of transactions to return Default: 100. + offset (int | Unset): Number of transactions to skip Default: 0. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[DetailedTransactionsResponse, ErrorResponse, HTTPValidationError]] + Response[DetailedTransactionsResponse | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -303,13 +303,13 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, - transaction_type: Union[None, Unset, str] = UNSET, - operation_type: Union[None, Unset, str] = UNSET, - start_date: Union[None, Unset, str] = UNSET, - end_date: Union[None, Unset, str] = UNSET, - limit: Union[Unset, int] = 100, - offset: Union[Unset, int] = 0, -) -> Optional[Union[DetailedTransactionsResponse, ErrorResponse, HTTPValidationError]]: + transaction_type: None | str | Unset = UNSET, + operation_type: None | str | Unset = UNSET, + start_date: None | str | Unset = UNSET, + end_date: None | str | Unset = UNSET, + limit: int | Unset = 100, + offset: int | Unset = 0, +) -> DetailedTransactionsResponse | ErrorResponse | HTTPValidationError | None: """List Credit Transactions Retrieve detailed credit transaction history for the specified graph. @@ -330,21 +330,21 @@ async def asyncio( Args: graph_id (str): Graph database identifier - transaction_type (Union[None, Unset, str]): Filter by transaction type (allocation, + transaction_type (None | str | Unset): Filter by transaction type (allocation, consumption, bonus, refund) - operation_type (Union[None, Unset, str]): Filter by operation type (e.g., entity_lookup, + operation_type (None | str | Unset): Filter by operation type (e.g., entity_lookup, cypher_query) - start_date (Union[None, Unset, str]): Start date for filtering (ISO format: YYYY-MM-DD) - end_date (Union[None, Unset, str]): End date for filtering (ISO format: YYYY-MM-DD) - limit (Union[Unset, int]): Maximum number of transactions to return Default: 100. - offset (Union[Unset, int]): Number of transactions to skip Default: 0. 
+ start_date (None | str | Unset): Start date for filtering (ISO format: YYYY-MM-DD) + end_date (None | str | Unset): End date for filtering (ISO format: YYYY-MM-DD) + limit (int | Unset): Maximum number of transactions to return Default: 100. + offset (int | Unset): Number of transactions to skip Default: 0. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[DetailedTransactionsResponse, ErrorResponse, HTTPValidationError] + DetailedTransactionsResponse | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/files/__init__.py b/robosystems_client/api/files/__init__.py new file mode 100644 index 0000000..2d7c0b2 --- /dev/null +++ b/robosystems_client/api/files/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/robosystems_client/api/files/create_file_upload.py b/robosystems_client/api/files/create_file_upload.py new file mode 100644 index 0000000..503531e --- /dev/null +++ b/robosystems_client/api/files/create_file_upload.py @@ -0,0 +1,311 @@ +from http import HTTPStatus +from typing import Any, cast + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.error_response import ErrorResponse +from ...models.file_upload_request import FileUploadRequest +from ...models.file_upload_response import FileUploadResponse +from ...models.http_validation_error import HTTPValidationError +from ...types import Response + + +def _get_kwargs( + graph_id: str, + *, + body: FileUploadRequest, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": f"/v1/graphs/{graph_id}/files", + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | FileUploadResponse | HTTPValidationError | None: + if response.status_code == 200: + response_200 = FileUploadResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 400: + response_400 = ErrorResponse.from_dict(response.json()) + + return response_400 + + if response.status_code == 401: + response_401 = cast(Any, None) + return response_401 + + if response.status_code == 403: + response_403 = ErrorResponse.from_dict(response.json()) + + return response_403 + + if response.status_code == 404: + response_404 = ErrorResponse.from_dict(response.json()) + + return response_404 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ErrorResponse | FileUploadResponse | HTTPValidationError]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + 
parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: FileUploadRequest, +) -> Response[Any | ErrorResponse | FileUploadResponse | HTTPValidationError]: + """Create File Upload + + Generate presigned S3 URL for file upload. + + Initiate file upload by generating a secure, time-limited presigned S3 URL. + Files are first-class resources uploaded directly to S3. + + **Request Body:** + - `file_name`: Name of the file (1-255 characters) + - `file_format`: Format (parquet, csv, json) + - `table_name`: Table to associate file with + + **Upload Workflow:** + 1. Call this endpoint to get presigned URL + 2. PUT file directly to S3 URL + 3. Call PATCH /files/{file_id} with status='uploaded' + 4. Backend validates and stages in DuckDB immediately + 5. Background task ingests to graph + + **Supported Formats:** + - Parquet, CSV, JSON + + **Auto-Table Creation:** + Tables are automatically created if they don't exist. + + **Important Notes:** + - Presigned URLs expire (default: 1 hour) + - Files are graph-scoped, independent resources + - Upload URL generation is included - no credit consumption + + Args: + graph_id (str): + body (FileUploadRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any | ErrorResponse | FileUploadResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + body: FileUploadRequest, +) -> Any | ErrorResponse | FileUploadResponse | HTTPValidationError | None: + """Create File Upload + + Generate presigned S3 URL for file upload. 
+ + Initiate file upload by generating a secure, time-limited presigned S3 URL. + Files are first-class resources uploaded directly to S3. + + **Request Body:** + - `file_name`: Name of the file (1-255 characters) + - `file_format`: Format (parquet, csv, json) + - `table_name`: Table to associate file with + + **Upload Workflow:** + 1. Call this endpoint to get presigned URL + 2. PUT file directly to S3 URL + 3. Call PATCH /files/{file_id} with status='uploaded' + 4. Backend validates and stages in DuckDB immediately + 5. Background task ingests to graph + + **Supported Formats:** + - Parquet, CSV, JSON + + **Auto-Table Creation:** + Tables are automatically created if they don't exist. + + **Important Notes:** + - Presigned URLs expire (default: 1 hour) + - Files are graph-scoped, independent resources + - Upload URL generation is included - no credit consumption + + Args: + graph_id (str): + body (FileUploadRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | ErrorResponse | FileUploadResponse | HTTPValidationError + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: FileUploadRequest, +) -> Response[Any | ErrorResponse | FileUploadResponse | HTTPValidationError]: + """Create File Upload + + Generate presigned S3 URL for file upload. + + Initiate file upload by generating a secure, time-limited presigned S3 URL. + Files are first-class resources uploaded directly to S3. + + **Request Body:** + - `file_name`: Name of the file (1-255 characters) + - `file_format`: Format (parquet, csv, json) + - `table_name`: Table to associate file with + + **Upload Workflow:** + 1. Call this endpoint to get presigned URL + 2. PUT file directly to S3 URL + 3. 
Call PATCH /files/{file_id} with status='uploaded' + 4. Backend validates and stages in DuckDB immediately + 5. Background task ingests to graph + + **Supported Formats:** + - Parquet, CSV, JSON + + **Auto-Table Creation:** + Tables are automatically created if they don't exist. + + **Important Notes:** + - Presigned URLs expire (default: 1 hour) + - Files are graph-scoped, independent resources + - Upload URL generation is included - no credit consumption + + Args: + graph_id (str): + body (FileUploadRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any | ErrorResponse | FileUploadResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + body: FileUploadRequest, +) -> Any | ErrorResponse | FileUploadResponse | HTTPValidationError | None: + """Create File Upload + + Generate presigned S3 URL for file upload. + + Initiate file upload by generating a secure, time-limited presigned S3 URL. + Files are first-class resources uploaded directly to S3. + + **Request Body:** + - `file_name`: Name of the file (1-255 characters) + - `file_format`: Format (parquet, csv, json) + - `table_name`: Table to associate file with + + **Upload Workflow:** + 1. Call this endpoint to get presigned URL + 2. PUT file directly to S3 URL + 3. Call PATCH /files/{file_id} with status='uploaded' + 4. Backend validates and stages in DuckDB immediately + 5. Background task ingests to graph + + **Supported Formats:** + - Parquet, CSV, JSON + + **Auto-Table Creation:** + Tables are automatically created if they don't exist. 
+ + **Important Notes:** + - Presigned URLs expire (default: 1 hour) + - Files are graph-scoped, independent resources + - Upload URL generation is included - no credit consumption + + Args: + graph_id (str): + body (FileUploadRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | ErrorResponse | FileUploadResponse | HTTPValidationError + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + body=body, + ) + ).parsed diff --git a/robosystems_client/api/files/delete_file.py b/robosystems_client/api/files/delete_file.py new file mode 100644 index 0000000..4d6a4d1 --- /dev/null +++ b/robosystems_client/api/files/delete_file.py @@ -0,0 +1,354 @@ +from http import HTTPStatus +from typing import Any, cast + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.delete_file_response import DeleteFileResponse +from ...models.error_response import ErrorResponse +from ...models.http_validation_error import HTTPValidationError +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + graph_id: str, + file_id: str, + *, + cascade: bool | Unset = False, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + params["cascade"] = cascade + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + _kwargs: dict[str, Any] = { + "method": "delete", + "url": f"/v1/graphs/{graph_id}/files/{file_id}", + "params": params, + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | DeleteFileResponse | ErrorResponse | HTTPValidationError | None: + if response.status_code == 200: + response_200 = DeleteFileResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 401: + response_401 = 
cast(Any, None) + return response_401 + + if response.status_code == 403: + response_403 = ErrorResponse.from_dict(response.json()) + + return response_403 + + if response.status_code == 404: + response_404 = ErrorResponse.from_dict(response.json()) + + return response_404 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | DeleteFileResponse | ErrorResponse | HTTPValidationError]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + file_id: str, + *, + client: AuthenticatedClient, + cascade: bool | Unset = False, +) -> Response[Any | DeleteFileResponse | ErrorResponse | HTTPValidationError]: + """Delete File + + Delete file from all layers. + + Remove file from S3, database tracking, and optionally from DuckDB and graph. + Files are deleted by file_id, independent of table context. + + **Query Parameters:** + - `cascade` (optional, default=false): Delete from all layers including DuckDB + + **What Happens (cascade=false):** + 1. File deleted from S3 + 2. Database record removed + 3. Table statistics updated + + **What Happens (cascade=true):** + 1. File data deleted from all DuckDB tables (by file_id) + 2. Graph marked as stale + 3. File deleted from S3 + 4. Database record removed + 5. 
Table statistics updated + + **Use Cases:** + - Remove incorrect or duplicate files + - Clean up failed uploads + - Delete files before graph ingestion + - Surgical data removal with cascade + + **Security:** + - Write access required + - Shared repositories block deletions + - Full audit trail + + **Important:** + - Use cascade=true for immediate DuckDB cleanup + - Graph rebuild recommended after cascade deletion + - File deletion is included - no credit consumption + + Args: + graph_id (str): + file_id (str): File ID + cascade (bool | Unset): If true, delete from all layers including DuckDB and mark graph + stale Default: False. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any | DeleteFileResponse | ErrorResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + file_id=file_id, + cascade=cascade, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + file_id: str, + *, + client: AuthenticatedClient, + cascade: bool | Unset = False, +) -> Any | DeleteFileResponse | ErrorResponse | HTTPValidationError | None: + """Delete File + + Delete file from all layers. + + Remove file from S3, database tracking, and optionally from DuckDB and graph. + Files are deleted by file_id, independent of table context. + + **Query Parameters:** + - `cascade` (optional, default=false): Delete from all layers including DuckDB + + **What Happens (cascade=false):** + 1. File deleted from S3 + 2. Database record removed + 3. Table statistics updated + + **What Happens (cascade=true):** + 1. File data deleted from all DuckDB tables (by file_id) + 2. Graph marked as stale + 3. File deleted from S3 + 4. Database record removed + 5. 
Table statistics updated + + **Use Cases:** + - Remove incorrect or duplicate files + - Clean up failed uploads + - Delete files before graph ingestion + - Surgical data removal with cascade + + **Security:** + - Write access required + - Shared repositories block deletions + - Full audit trail + + **Important:** + - Use cascade=true for immediate DuckDB cleanup + - Graph rebuild recommended after cascade deletion + - File deletion is included - no credit consumption + + Args: + graph_id (str): + file_id (str): File ID + cascade (bool | Unset): If true, delete from all layers including DuckDB and mark graph + stale Default: False. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | DeleteFileResponse | ErrorResponse | HTTPValidationError + """ + + return sync_detailed( + graph_id=graph_id, + file_id=file_id, + client=client, + cascade=cascade, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + file_id: str, + *, + client: AuthenticatedClient, + cascade: bool | Unset = False, +) -> Response[Any | DeleteFileResponse | ErrorResponse | HTTPValidationError]: + """Delete File + + Delete file from all layers. + + Remove file from S3, database tracking, and optionally from DuckDB and graph. + Files are deleted by file_id, independent of table context. + + **Query Parameters:** + - `cascade` (optional, default=false): Delete from all layers including DuckDB + + **What Happens (cascade=false):** + 1. File deleted from S3 + 2. Database record removed + 3. Table statistics updated + + **What Happens (cascade=true):** + 1. File data deleted from all DuckDB tables (by file_id) + 2. Graph marked as stale + 3. File deleted from S3 + 4. Database record removed + 5. 
Table statistics updated + + **Use Cases:** + - Remove incorrect or duplicate files + - Clean up failed uploads + - Delete files before graph ingestion + - Surgical data removal with cascade + + **Security:** + - Write access required + - Shared repositories block deletions + - Full audit trail + + **Important:** + - Use cascade=true for immediate DuckDB cleanup + - Graph rebuild recommended after cascade deletion + - File deletion is included - no credit consumption + + Args: + graph_id (str): + file_id (str): File ID + cascade (bool | Unset): If true, delete from all layers including DuckDB and mark graph + stale Default: False. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any | DeleteFileResponse | ErrorResponse | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + file_id=file_id, + cascade=cascade, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + file_id: str, + *, + client: AuthenticatedClient, + cascade: bool | Unset = False, +) -> Any | DeleteFileResponse | ErrorResponse | HTTPValidationError | None: + """Delete File + + Delete file from all layers. + + Remove file from S3, database tracking, and optionally from DuckDB and graph. + Files are deleted by file_id, independent of table context. + + **Query Parameters:** + - `cascade` (optional, default=false): Delete from all layers including DuckDB + + **What Happens (cascade=false):** + 1. File deleted from S3 + 2. Database record removed + 3. Table statistics updated + + **What Happens (cascade=true):** + 1. File data deleted from all DuckDB tables (by file_id) + 2. Graph marked as stale + 3. File deleted from S3 + 4. Database record removed + 5. 
Table statistics updated + + **Use Cases:** + - Remove incorrect or duplicate files + - Clean up failed uploads + - Delete files before graph ingestion + - Surgical data removal with cascade + + **Security:** + - Write access required + - Shared repositories block deletions + - Full audit trail + + **Important:** + - Use cascade=true for immediate DuckDB cleanup + - Graph rebuild recommended after cascade deletion + - File deletion is included - no credit consumption + + Args: + graph_id (str): + file_id (str): File ID + cascade (bool | Unset): If true, delete from all layers including DuckDB and mark graph + stale Default: False. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | DeleteFileResponse | ErrorResponse | HTTPValidationError + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + file_id=file_id, + client=client, + cascade=cascade, + ) + ).parsed diff --git a/robosystems_client/api/files/get_file.py b/robosystems_client/api/files/get_file.py new file mode 100644 index 0000000..64691f9 --- /dev/null +++ b/robosystems_client/api/files/get_file.py @@ -0,0 +1,329 @@ +from http import HTTPStatus +from typing import Any, cast + +import httpx + +from ... 
# NOTE(review): this module matches the openapi-python-client generated-endpoint
# pattern (per-endpoint module with sync/asyncio variants). If it is generated,
# regenerate from the OpenAPI spec instead of hand-editing — TODO confirm.
from http import HTTPStatus
from typing import Any, cast

import httpx

from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.error_response import ErrorResponse
from ...models.get_file_info_response import GetFileInfoResponse
from ...models.http_validation_error import HTTPValidationError
from ...types import Response


def _get_kwargs(
    graph_id: str,
    file_id: str,
) -> dict[str, Any]:
    """Build httpx request kwargs for GET /v1/graphs/{graph_id}/files/{file_id}."""
    _kwargs: dict[str, Any] = {
        "method": "get",
        "url": f"/v1/graphs/{graph_id}/files/{file_id}",
    }

    return _kwargs


def _parse_response(
    *, client: AuthenticatedClient | Client, response: httpx.Response
) -> Any | ErrorResponse | GetFileInfoResponse | HTTPValidationError | None:
    """Map documented status codes to their response models.

    200 -> GetFileInfoResponse, 401 -> None (cast to Any), 403/404 -> ErrorResponse,
    422 -> HTTPValidationError. Undocumented codes raise or return None depending
    on ``client.raise_on_unexpected_status``.
    """
    if response.status_code == 200:
        response_200 = GetFileInfoResponse.from_dict(response.json())

        return response_200

    if response.status_code == 401:
        # 401 carries no parseable body in this API; surface it as a bare None
        # (typed Any) so callers can distinguish it via the Response status code.
        response_401 = cast(Any, None)
        return response_401

    if response.status_code == 403:
        response_403 = ErrorResponse.from_dict(response.json())

        return response_403

    if response.status_code == 404:
        response_404 = ErrorResponse.from_dict(response.json())

        return response_404

    if response.status_code == 422:
        response_422 = HTTPValidationError.from_dict(response.json())

        return response_422

    if client.raise_on_unexpected_status:
        raise errors.UnexpectedStatus(response.status_code, response.content)
    else:
        return None


def _build_response(
    *, client: AuthenticatedClient | Client, response: httpx.Response
) -> Response[Any | ErrorResponse | GetFileInfoResponse | HTTPValidationError]:
    """Wrap the raw httpx response plus its parsed model in a typed Response."""
    return Response(
        status_code=HTTPStatus(response.status_code),
        content=response.content,
        headers=response.headers,
        parsed=_parse_response(client=client, response=response),
    )


def sync_detailed(
    graph_id: str,
    file_id: str,
    *,
    client: AuthenticatedClient,
) -> Response[Any | ErrorResponse | GetFileInfoResponse | HTTPValidationError]:
    """Get File Information

    Get detailed information about a specific file.

    Retrieve comprehensive metadata for a single file by file_id, independent of
    table context. Files are first-class resources with complete lifecycle tracking.

    **Returned Information:**
    - File ID, name, format, size
    - Upload status and timestamps
    - **Enhanced Multi-Layer Status** (new in this version):
      - S3 layer: upload_status, uploaded_at, size_bytes, row_count
      - DuckDB layer: duckdb_status, duckdb_staged_at, duckdb_row_count
      - Graph layer: graph_status, graph_ingested_at
    - Table association
    - S3 location

    **Multi-Layer Pipeline Visibility:**
    The `layers` object provides independent status tracking across the three-tier
    data pipeline:
    - **S3 (Immutable Source)**: File upload and validation
    - **DuckDB (Mutable Staging)**: Immediate queryability with file provenance
    - **Graph (Immutable View)**: Optional graph database materialization

    Each layer shows its own status, timestamp, and row count (where applicable),
    enabling precise debugging and monitoring of the data ingestion flow.

    **Use Cases:**
    - Validate file upload completion
    - Monitor multi-layer ingestion progress in real-time
    - Debug upload or staging issues at specific layers
    - Verify file metadata and row counts
    - Track file provenance through the pipeline
    - Identify bottlenecks in the ingestion process

    **Note:**
    File info retrieval is included - no credit consumption

    Args:
        graph_id (str):
        file_id (str): File ID

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Any | ErrorResponse | GetFileInfoResponse | HTTPValidationError]
    """

    kwargs = _get_kwargs(
        graph_id=graph_id,
        file_id=file_id,
    )

    response = client.get_httpx_client().request(
        **kwargs,
    )

    return _build_response(client=client, response=response)


def sync(
    graph_id: str,
    file_id: str,
    *,
    client: AuthenticatedClient,
) -> Any | ErrorResponse | GetFileInfoResponse | HTTPValidationError | None:
    """Get File Information

    Get detailed information about a specific file; returns only the parsed body.
    See :func:`sync_detailed` for the full endpoint description (multi-layer
    status, use cases, and returned metadata).

    Args:
        graph_id (str):
        file_id (str): File ID

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Any | ErrorResponse | GetFileInfoResponse | HTTPValidationError
    """

    return sync_detailed(
        graph_id=graph_id,
        file_id=file_id,
        client=client,
    ).parsed


async def asyncio_detailed(
    graph_id: str,
    file_id: str,
    *,
    client: AuthenticatedClient,
) -> Response[Any | ErrorResponse | GetFileInfoResponse | HTTPValidationError]:
    """Get File Information (async).

    Async variant of :func:`sync_detailed`; see that function for the full
    endpoint description (multi-layer status, use cases, returned metadata).

    Args:
        graph_id (str):
        file_id (str): File ID

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Any | ErrorResponse | GetFileInfoResponse | HTTPValidationError]
    """

    kwargs = _get_kwargs(
        graph_id=graph_id,
        file_id=file_id,
    )

    response = await client.get_async_httpx_client().request(**kwargs)

    return _build_response(client=client, response=response)


async def asyncio(
    graph_id: str,
    file_id: str,
    *,
    client: AuthenticatedClient,
) -> Any | ErrorResponse | GetFileInfoResponse | HTTPValidationError | None:
    """Get File Information (async).

    Async variant of :func:`sync`; returns only the parsed body. See
    :func:`sync_detailed` for the full endpoint description.

    Args:
        graph_id (str):
        file_id (str): File ID

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Any | ErrorResponse | GetFileInfoResponse | HTTPValidationError
    """

    return (
        await asyncio_detailed(
            graph_id=graph_id,
            file_id=file_id,
            client=client,
        )
    ).parsed
# NOTE(review): this module matches the openapi-python-client generated-endpoint
# pattern. If it is generated, regenerate from the OpenAPI spec instead of
# hand-editing — TODO confirm.
from http import HTTPStatus
from typing import Any, cast

import httpx

from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.error_response import ErrorResponse
from ...models.http_validation_error import HTTPValidationError
from ...models.list_table_files_response import ListTableFilesResponse
from ...types import UNSET, Response, Unset


def _get_kwargs(
    graph_id: str,
    *,
    table_name: None | str | Unset = UNSET,
    status: None | str | Unset = UNSET,
) -> dict[str, Any]:
    """Build httpx request kwargs for GET /v1/graphs/{graph_id}/files.

    Optional ``table_name``/``status`` filters are added to the query string only
    when they are neither UNSET nor None (see the filtering comprehension below).
    """
    params: dict[str, Any] = {}

    json_table_name: None | str | Unset
    if isinstance(table_name, Unset):
        json_table_name = UNSET
    else:
        json_table_name = table_name
    params["table_name"] = json_table_name

    json_status: None | str | Unset
    if isinstance(status, Unset):
        json_status = UNSET
    else:
        json_status = status
    params["status"] = json_status

    # Drop unset/None entries so they are not serialized as query parameters.
    params = {k: v for k, v in params.items() if v is not UNSET and v is not None}

    _kwargs: dict[str, Any] = {
        "method": "get",
        "url": f"/v1/graphs/{graph_id}/files",
        "params": params,
    }

    return _kwargs


def _parse_response(
    *, client: AuthenticatedClient | Client, response: httpx.Response
) -> Any | ErrorResponse | HTTPValidationError | ListTableFilesResponse | None:
    """Map documented status codes to their response models.

    200 -> ListTableFilesResponse, 401 -> None (cast to Any),
    403/404 -> ErrorResponse, 422 -> HTTPValidationError. Undocumented codes
    raise or return None depending on ``client.raise_on_unexpected_status``.
    """
    if response.status_code == 200:
        response_200 = ListTableFilesResponse.from_dict(response.json())

        return response_200

    if response.status_code == 401:
        # 401 carries no parseable body; surface it as a bare None (typed Any).
        response_401 = cast(Any, None)
        return response_401

    if response.status_code == 403:
        response_403 = ErrorResponse.from_dict(response.json())

        return response_403

    if response.status_code == 404:
        response_404 = ErrorResponse.from_dict(response.json())

        return response_404

    if response.status_code == 422:
        response_422 = HTTPValidationError.from_dict(response.json())

        return response_422

    if client.raise_on_unexpected_status:
        raise errors.UnexpectedStatus(response.status_code, response.content)
    else:
        return None


def _build_response(
    *, client: AuthenticatedClient | Client, response: httpx.Response
) -> Response[Any | ErrorResponse | HTTPValidationError | ListTableFilesResponse]:
    """Wrap the raw httpx response plus its parsed model in a typed Response."""
    return Response(
        status_code=HTTPStatus(response.status_code),
        content=response.content,
        headers=response.headers,
        parsed=_parse_response(client=client, response=response),
    )


def sync_detailed(
    graph_id: str,
    *,
    client: AuthenticatedClient,
    table_name: None | str | Unset = UNSET,
    status: None | str | Unset = UNSET,
) -> Response[Any | ErrorResponse | HTTPValidationError | ListTableFilesResponse]:
    """List Files in Graph

    List all files in the graph with optional filtering.

    Get a complete inventory of files across all tables or filtered by table name,
    status, or other criteria. Files are first-class resources with independent lifecycle.

    **Query Parameters:**
    - `table_name` (optional): Filter by table name
    - `status` (optional): Filter by upload status (uploaded, pending, failed, etc.)

    **Use Cases:**
    - Monitor file upload progress across all tables
    - Verify files are ready for ingestion
    - Check file metadata and sizes
    - Track storage usage per graph
    - Identify failed or incomplete uploads
    - Audit file provenance

    **Returned Metadata:**
    - File ID, name, and format (parquet, csv, json)
    - Size in bytes and row count (if available)
    - Upload status and timestamps
    - DuckDB and graph ingestion status
    - Table association

    **File Lifecycle Tracking:**
    Multi-layer status across S3 → DuckDB → Graph pipeline

    **Important Notes:**
    - Files are graph-scoped, not table-scoped
    - Use table_name parameter to filter by table
    - File listing is included - no credit consumption

    Args:
        graph_id (str):
        table_name (None | str | Unset): Filter by table name (optional)
        status (None | str | Unset): Filter by upload status (optional)

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Any | ErrorResponse | HTTPValidationError | ListTableFilesResponse]
    """

    kwargs = _get_kwargs(
        graph_id=graph_id,
        table_name=table_name,
        status=status,
    )

    response = client.get_httpx_client().request(
        **kwargs,
    )

    return _build_response(client=client, response=response)


def sync(
    graph_id: str,
    *,
    client: AuthenticatedClient,
    table_name: None | str | Unset = UNSET,
    status: None | str | Unset = UNSET,
) -> Any | ErrorResponse | HTTPValidationError | ListTableFilesResponse | None:
    """List Files in Graph

    List all files in the graph with optional filtering; returns only the parsed
    body. See :func:`sync_detailed` for the full endpoint description (filters,
    use cases, returned metadata).

    Args:
        graph_id (str):
        table_name (None | str | Unset): Filter by table name (optional)
        status (None | str | Unset): Filter by upload status (optional)

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Any | ErrorResponse | HTTPValidationError | ListTableFilesResponse
    """

    return sync_detailed(
        graph_id=graph_id,
        client=client,
        table_name=table_name,
        status=status,
    ).parsed


async def asyncio_detailed(
    graph_id: str,
    *,
    client: AuthenticatedClient,
    table_name: None | str | Unset = UNSET,
    status: None | str | Unset = UNSET,
) -> Response[Any | ErrorResponse | HTTPValidationError | ListTableFilesResponse]:
    """List Files in Graph (async).

    Async variant of :func:`sync_detailed`; see that function for the full
    endpoint description.

    Args:
        graph_id (str):
        table_name (None | str | Unset): Filter by table name (optional)
        status (None | str | Unset): Filter by upload status (optional)

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Any | ErrorResponse | HTTPValidationError | ListTableFilesResponse]
    """

    kwargs = _get_kwargs(
        graph_id=graph_id,
        table_name=table_name,
        status=status,
    )

    response = await client.get_async_httpx_client().request(**kwargs)

    return _build_response(client=client, response=response)


async def asyncio(
    graph_id: str,
    *,
    client: AuthenticatedClient,
    table_name: None | str | Unset = UNSET,
    status: None | str | Unset = UNSET,
) -> Any | ErrorResponse | HTTPValidationError | ListTableFilesResponse | None:
    """List Files in Graph (async).

    Async variant of :func:`sync`; returns only the parsed body. See
    :func:`sync_detailed` for the full endpoint description.

    Args:
        graph_id (str):
        table_name (None | str | Unset): Filter by table name (optional)
        status (None | str | Unset): Filter by upload status (optional)

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Any | ErrorResponse | HTTPValidationError | ListTableFilesResponse
    """

    return (
        await asyncio_detailed(
            graph_id=graph_id,
            client=client,
            table_name=table_name,
            status=status,
        )
    ).parsed
# NOTE(review): this module matches the openapi-python-client generated-endpoint
# pattern. If it is generated, regenerate from the OpenAPI spec instead of
# hand-editing — TODO confirm.
from http import HTTPStatus
from typing import Any, cast

import httpx

from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.error_response import ErrorResponse
from ...models.file_status_update import FileStatusUpdate
from ...models.http_validation_error import HTTPValidationError
from ...models.update_file_response_updatefile import UpdateFileResponseUpdatefile
from ...types import Response


def _get_kwargs(
    graph_id: str,
    file_id: str,
    *,
    body: FileStatusUpdate,
) -> dict[str, Any]:
    """Build httpx request kwargs for PATCH /v1/graphs/{graph_id}/files/{file_id}.

    Serializes ``body`` to JSON and sets the Content-Type header accordingly.
    """
    headers: dict[str, Any] = {}

    _kwargs: dict[str, Any] = {
        "method": "patch",
        "url": f"/v1/graphs/{graph_id}/files/{file_id}",
    }

    _kwargs["json"] = body.to_dict()

    headers["Content-Type"] = "application/json"

    _kwargs["headers"] = headers
    return _kwargs


def _parse_response(
    *, client: AuthenticatedClient | Client, response: httpx.Response
) -> Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile | None:
    """Map documented status codes to their response models.

    200 -> UpdateFileResponseUpdatefile, 400/403/404 -> ErrorResponse,
    401 -> None (cast to Any), 422 -> HTTPValidationError. Undocumented codes
    raise or return None depending on ``client.raise_on_unexpected_status``.
    """
    if response.status_code == 200:
        response_200 = UpdateFileResponseUpdatefile.from_dict(response.json())

        return response_200

    if response.status_code == 400:
        response_400 = ErrorResponse.from_dict(response.json())

        return response_400

    if response.status_code == 401:
        # 401 carries no parseable body; surface it as a bare None (typed Any).
        response_401 = cast(Any, None)
        return response_401

    if response.status_code == 403:
        response_403 = ErrorResponse.from_dict(response.json())

        return response_403

    if response.status_code == 404:
        response_404 = ErrorResponse.from_dict(response.json())

        return response_404

    if response.status_code == 422:
        response_422 = HTTPValidationError.from_dict(response.json())

        return response_422

    if client.raise_on_unexpected_status:
        raise errors.UnexpectedStatus(response.status_code, response.content)
    else:
        return None


def _build_response(
    *, client: AuthenticatedClient | Client, response: httpx.Response
) -> Response[Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile]:
    """Wrap the raw httpx response plus its parsed model in a typed Response."""
    return Response(
        status_code=HTTPStatus(response.status_code),
        content=response.content,
        headers=response.headers,
        parsed=_parse_response(client=client, response=response),
    )


def sync_detailed(
    graph_id: str,
    file_id: str,
    *,
    client: AuthenticatedClient,
    body: FileStatusUpdate,
) -> Response[Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile]:
    """Update File Status

    Update file status and trigger processing.

    Update file status after upload completion. Setting status='uploaded' triggers
    immediate DuckDB staging and optional graph ingestion.

    **Request Body:**
    - `status`: New status (uploaded, disabled, failed)
    - `ingest_to_graph` (optional): If true, auto-ingest to graph after DuckDB staging

    **What Happens (status='uploaded'):**
    1. File validated in S3
    2. Row count calculated
    3. DuckDB staging triggered immediately (Celery task)
    4. If ingest_to_graph=true, graph ingestion queued
    5. File queryable in DuckDB within seconds

    **Use Cases:**
    - Signal upload completion
    - Trigger immediate DuckDB staging
    - Enable/disable files
    - Mark failed uploads

    **Important:**
    - Files must exist in S3 before marking uploaded
    - DuckDB staging happens asynchronously
    - Graph ingestion is optional (ingest_to_graph flag)

    Args:
        graph_id (str):
        file_id (str): File ID
        body (FileStatusUpdate):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile]
    """

    kwargs = _get_kwargs(
        graph_id=graph_id,
        file_id=file_id,
        body=body,
    )

    response = client.get_httpx_client().request(
        **kwargs,
    )

    return _build_response(client=client, response=response)


def sync(
    graph_id: str,
    file_id: str,
    *,
    client: AuthenticatedClient,
    body: FileStatusUpdate,
) -> Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile | None:
    """Update File Status

    Update file status and trigger processing; returns only the parsed body.
    See :func:`sync_detailed` for the full endpoint description (request body,
    side effects, use cases).

    Args:
        graph_id (str):
        file_id (str): File ID
        body (FileStatusUpdate):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile
    """

    return sync_detailed(
        graph_id=graph_id,
        file_id=file_id,
        client=client,
        body=body,
    ).parsed


async def asyncio_detailed(
    graph_id: str,
    file_id: str,
    *,
    client: AuthenticatedClient,
    body: FileStatusUpdate,
) -> Response[Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile]:
    """Update File Status (async).

    Async variant of :func:`sync_detailed`; see that function for the full
    endpoint description.

    Args:
        graph_id (str):
        file_id (str): File ID
        body (FileStatusUpdate):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile]
    """

    kwargs = _get_kwargs(
        graph_id=graph_id,
        file_id=file_id,
        body=body,
    )

    response = await client.get_async_httpx_client().request(**kwargs)

    return _build_response(client=client, response=response)


async def asyncio(
    graph_id: str,
    file_id: str,
    *,
    client: AuthenticatedClient,
    body: FileStatusUpdate,
) -> Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile | None:
    """Update File Status (async).

    Async variant of :func:`sync`; returns only the parsed body. See
    :func:`sync_detailed` for the full endpoint description.

    Args:
        graph_id (str):
        file_id (str): File ID
        body (FileStatusUpdate):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile
    """

    return (
        await asyncio_detailed(
            graph_id=graph_id,
            file_id=file_id,
            client=client,
            body=body,
        )
    ).parsed
+ + Returns: + Any | ErrorResponse | HTTPValidationError | UpdateFileResponseUpdatefile + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + file_id=file_id, + client=client, + body=body, + ) + ).parsed diff --git a/robosystems_client/api/graph_health/get_database_health.py b/robosystems_client/api/graph_health/get_database_health.py index 6c1db98..f3f960e 100644 --- a/robosystems_client/api/graph_health/get_database_health.py +++ b/robosystems_client/api/graph_health/get_database_health.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, DatabaseHealthResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | DatabaseHealthResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = DatabaseHealthResponse.from_dict(response.json()) @@ -53,8 +53,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, DatabaseHealthResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | DatabaseHealthResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -67,7 +67,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, DatabaseHealthResponse, HTTPValidationError]]: +) -> Response[Any | DatabaseHealthResponse | HTTPValidationError]: """Database Health Check Get comprehensive health information for the graph database. @@ -103,7 +103,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, DatabaseHealthResponse, HTTPValidationError]] + Response[Any | DatabaseHealthResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -121,7 +121,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, DatabaseHealthResponse, HTTPValidationError]]: +) -> Any | DatabaseHealthResponse | HTTPValidationError | None: """Database Health Check Get comprehensive health information for the graph database. @@ -157,7 +157,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, DatabaseHealthResponse, HTTPValidationError] + Any | DatabaseHealthResponse | HTTPValidationError """ return sync_detailed( @@ -170,7 +170,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, DatabaseHealthResponse, HTTPValidationError]]: +) -> Response[Any | DatabaseHealthResponse | HTTPValidationError]: """Database Health Check Get comprehensive health information for the graph database. @@ -206,7 +206,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, DatabaseHealthResponse, HTTPValidationError]] + Response[Any | DatabaseHealthResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -222,7 +222,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, DatabaseHealthResponse, HTTPValidationError]]: +) -> Any | DatabaseHealthResponse | HTTPValidationError | None: """Database Health Check Get comprehensive health information for the graph database. @@ -258,7 +258,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, DatabaseHealthResponse, HTTPValidationError] + Any | DatabaseHealthResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/graph_info/get_database_info.py b/robosystems_client/api/graph_info/get_database_info.py index 0732b40..e1052b7 100644 --- a/robosystems_client/api/graph_info/get_database_info.py +++ b/robosystems_client/api/graph_info/get_database_info.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, DatabaseInfoResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | DatabaseInfoResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = DatabaseInfoResponse.from_dict(response.json()) @@ -53,8 +53,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, DatabaseInfoResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | DatabaseInfoResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -67,7 +67,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, DatabaseInfoResponse, HTTPValidationError]]: +) -> Response[Any | DatabaseInfoResponse | HTTPValidationError]: """Database Information Get comprehensive database information and statistics. @@ -104,7 +104,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, DatabaseInfoResponse, HTTPValidationError]] + Response[Any | DatabaseInfoResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -122,7 +122,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, DatabaseInfoResponse, HTTPValidationError]]: +) -> Any | DatabaseInfoResponse | HTTPValidationError | None: """Database Information Get comprehensive database information and statistics. @@ -159,7 +159,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, DatabaseInfoResponse, HTTPValidationError] + Any | DatabaseInfoResponse | HTTPValidationError """ return sync_detailed( @@ -172,7 +172,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, DatabaseInfoResponse, HTTPValidationError]]: +) -> Response[Any | DatabaseInfoResponse | HTTPValidationError]: """Database Information Get comprehensive database information and statistics. @@ -209,7 +209,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, DatabaseInfoResponse, HTTPValidationError]] + Response[Any | DatabaseInfoResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -225,7 +225,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, DatabaseInfoResponse, HTTPValidationError]]: +) -> Any | DatabaseInfoResponse | HTTPValidationError | None: """Database Information Get comprehensive database information and statistics. @@ -262,7 +262,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, DatabaseInfoResponse, HTTPValidationError] + Any | DatabaseInfoResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/graph_limits/get_graph_limits.py b/robosystems_client/api/graph_limits/get_graph_limits.py index 12a4d7f..54a18b8 100644 --- a/robosystems_client/api/graph_limits/get_graph_limits.py +++ b/robosystems_client/api/graph_limits/get_graph_limits.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, GraphLimitsResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | GraphLimitsResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = GraphLimitsResponse.from_dict(response.json()) @@ -53,8 +53,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, GraphLimitsResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | GraphLimitsResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -67,7 +67,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, GraphLimitsResponse, HTTPValidationError]]: +) -> Response[Any | GraphLimitsResponse | HTTPValidationError]: """Get Graph Operational Limits Get comprehensive operational limits for the graph database. @@ -92,7 +92,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, GraphLimitsResponse, HTTPValidationError]] + Response[Any | GraphLimitsResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -110,7 +110,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, GraphLimitsResponse, HTTPValidationError]]: +) -> Any | GraphLimitsResponse | HTTPValidationError | None: """Get Graph Operational Limits Get comprehensive operational limits for the graph database. @@ -135,7 +135,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, GraphLimitsResponse, HTTPValidationError] + Any | GraphLimitsResponse | HTTPValidationError """ return sync_detailed( @@ -148,7 +148,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, GraphLimitsResponse, HTTPValidationError]]: +) -> Response[Any | GraphLimitsResponse | HTTPValidationError]: """Get Graph Operational Limits Get comprehensive operational limits for the graph database. @@ -173,7 +173,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, GraphLimitsResponse, HTTPValidationError]] + Response[Any | GraphLimitsResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -189,7 +189,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, GraphLimitsResponse, HTTPValidationError]]: +) -> Any | GraphLimitsResponse | HTTPValidationError | None: """Get Graph Operational Limits Get comprehensive operational limits for the graph database. @@ -214,7 +214,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, GraphLimitsResponse, HTTPValidationError] + Any | GraphLimitsResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/graphs/create_graph.py b/robosystems_client/api/graphs/create_graph.py index dbdb8d8..f7ffd90 100644 --- a/robosystems_client/api/graphs/create_graph.py +++ b/robosystems_client/api/graphs/create_graph.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -30,8 +30,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | None: if response.status_code == 202: response_202 = response.json() return response_202 @@ -48,8 +48,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -62,7 +62,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: CreateGraphRequest, -) -> Response[Union[Any, HTTPValidationError]]: +) -> Response[Any | HTTPValidationError]: """Create New Graph Database Create a new graph database with specified schema and optionally an initial entity. @@ -144,7 +144,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -162,7 +162,7 @@ def sync( *, client: AuthenticatedClient, body: CreateGraphRequest, -) -> Optional[Union[Any, HTTPValidationError]]: +) -> Any | HTTPValidationError | None: """Create New Graph Database Create a new graph database with specified schema and optionally an initial entity. @@ -244,7 +244,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return sync_detailed( @@ -257,7 +257,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: CreateGraphRequest, -) -> Response[Union[Any, HTTPValidationError]]: +) -> Response[Any | HTTPValidationError]: """Create New Graph Database Create a new graph database with specified schema and optionally an initial entity. @@ -339,7 +339,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -355,7 +355,7 @@ async def asyncio( *, client: AuthenticatedClient, body: CreateGraphRequest, -) -> Optional[Union[Any, HTTPValidationError]]: +) -> Any | HTTPValidationError | None: """Create New Graph Database Create a new graph database with specified schema and optionally an initial entity. @@ -437,7 +437,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return ( diff --git a/robosystems_client/api/graphs/get_available_extensions.py b/robosystems_client/api/graphs/get_available_extensions.py index 44e2797..d5e65aa 100644 --- a/robosystems_client/api/graphs/get_available_extensions.py +++ b/robosystems_client/api/graphs/get_available_extensions.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -19,8 +19,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, AvailableExtensionsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | AvailableExtensionsResponse | None: if response.status_code == 200: response_200 = AvailableExtensionsResponse.from_dict(response.json()) @@ -37,8 +37,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, AvailableExtensionsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | AvailableExtensionsResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -50,7 +50,7 @@ def _build_response( def sync_detailed( *, client: AuthenticatedClient, -) -> Response[Union[Any, AvailableExtensionsResponse]]: +) -> Response[Any | AvailableExtensionsResponse]: """Get Available Schema Extensions List all available schema extensions for graph creation. @@ -88,7 +88,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, AvailableExtensionsResponse]] + Response[Any | AvailableExtensionsResponse] """ kwargs = _get_kwargs() @@ -103,7 +103,7 @@ def sync_detailed( def sync( *, client: AuthenticatedClient, -) -> Optional[Union[Any, AvailableExtensionsResponse]]: +) -> Any | AvailableExtensionsResponse | None: """Get Available Schema Extensions List all available schema extensions for graph creation. @@ -141,7 +141,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, AvailableExtensionsResponse] + Any | AvailableExtensionsResponse """ return sync_detailed( @@ -152,7 +152,7 @@ def sync( async def asyncio_detailed( *, client: AuthenticatedClient, -) -> Response[Union[Any, AvailableExtensionsResponse]]: +) -> Response[Any | AvailableExtensionsResponse]: """Get Available Schema Extensions List all available schema extensions for graph creation. @@ -190,7 +190,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, AvailableExtensionsResponse]] + Response[Any | AvailableExtensionsResponse] """ kwargs = _get_kwargs() @@ -203,7 +203,7 @@ async def asyncio_detailed( async def asyncio( *, client: AuthenticatedClient, -) -> Optional[Union[Any, AvailableExtensionsResponse]]: +) -> Any | AvailableExtensionsResponse | None: """Get Available Schema Extensions List all available schema extensions for graph creation. @@ -241,7 +241,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, AvailableExtensionsResponse] + Any | AvailableExtensionsResponse """ return ( diff --git a/robosystems_client/api/graphs/get_available_graph_tiers.py b/robosystems_client/api/graphs/get_available_graph_tiers.py index ffa1fa7..c622557 100644 --- a/robosystems_client/api/graphs/get_available_graph_tiers.py +++ b/robosystems_client/api/graphs/get_available_graph_tiers.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -12,7 +12,7 @@ def _get_kwargs( *, - include_disabled: Union[Unset, bool] = False, + include_disabled: bool | Unset = False, ) -> dict[str, Any]: params: dict[str, Any] = {} @@ -30,8 +30,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, AvailableGraphTiersResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | AvailableGraphTiersResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = AvailableGraphTiersResponse.from_dict(response.json()) @@ -53,8 +53,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, AvailableGraphTiersResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | AvailableGraphTiersResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -66,8 +66,8 @@ def _build_response( def sync_detailed( *, client: AuthenticatedClient, - include_disabled: Union[Unset, bool] = False, -) -> Response[Union[Any, AvailableGraphTiersResponse, HTTPValidationError]]: + include_disabled: bool | Unset = False, +) -> Response[Any | AvailableGraphTiersResponse | HTTPValidationError]: """Get Available Graph Tiers List all available graph database tier 
configurations. @@ -99,14 +99,14 @@ def sync_detailed( Tier listing is included - no credit consumption required. Args: - include_disabled (Union[Unset, bool]): Default: False. + include_disabled (bool | Unset): Default: False. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, AvailableGraphTiersResponse, HTTPValidationError]] + Response[Any | AvailableGraphTiersResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -123,8 +123,8 @@ def sync_detailed( def sync( *, client: AuthenticatedClient, - include_disabled: Union[Unset, bool] = False, -) -> Optional[Union[Any, AvailableGraphTiersResponse, HTTPValidationError]]: + include_disabled: bool | Unset = False, +) -> Any | AvailableGraphTiersResponse | HTTPValidationError | None: """Get Available Graph Tiers List all available graph database tier configurations. @@ -156,14 +156,14 @@ def sync( Tier listing is included - no credit consumption required. Args: - include_disabled (Union[Unset, bool]): Default: False. + include_disabled (bool | Unset): Default: False. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, AvailableGraphTiersResponse, HTTPValidationError] + Any | AvailableGraphTiersResponse | HTTPValidationError """ return sync_detailed( @@ -175,8 +175,8 @@ def sync( async def asyncio_detailed( *, client: AuthenticatedClient, - include_disabled: Union[Unset, bool] = False, -) -> Response[Union[Any, AvailableGraphTiersResponse, HTTPValidationError]]: + include_disabled: bool | Unset = False, +) -> Response[Any | AvailableGraphTiersResponse | HTTPValidationError]: """Get Available Graph Tiers List all available graph database tier configurations. 
@@ -208,14 +208,14 @@ async def asyncio_detailed( Tier listing is included - no credit consumption required. Args: - include_disabled (Union[Unset, bool]): Default: False. + include_disabled (bool | Unset): Default: False. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, AvailableGraphTiersResponse, HTTPValidationError]] + Response[Any | AvailableGraphTiersResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -230,8 +230,8 @@ async def asyncio_detailed( async def asyncio( *, client: AuthenticatedClient, - include_disabled: Union[Unset, bool] = False, -) -> Optional[Union[Any, AvailableGraphTiersResponse, HTTPValidationError]]: + include_disabled: bool | Unset = False, +) -> Any | AvailableGraphTiersResponse | HTTPValidationError | None: """Get Available Graph Tiers List all available graph database tier configurations. @@ -263,14 +263,14 @@ async def asyncio( Tier listing is included - no credit consumption required. Args: - include_disabled (Union[Unset, bool]): Default: False. + include_disabled (bool | Unset): Default: False. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, AvailableGraphTiersResponse, HTTPValidationError] + Any | AvailableGraphTiersResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/graphs/get_graphs.py b/robosystems_client/api/graphs/get_graphs.py index fa639d1..6ab17d1 100644 --- a/robosystems_client/api/graphs/get_graphs.py +++ b/robosystems_client/api/graphs/get_graphs.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -19,8 +19,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, UserGraphsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | UserGraphsResponse | None: if response.status_code == 200: response_200 = UserGraphsResponse.from_dict(response.json()) @@ -37,8 +37,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, UserGraphsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | UserGraphsResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -50,7 +50,7 @@ def _build_response( def sync_detailed( *, client: AuthenticatedClient, -) -> Response[Union[Any, UserGraphsResponse]]: +) -> Response[Any | UserGraphsResponse]: r"""Get User Graphs and Repositories List all graph databases and shared repositories accessible to the current user. @@ -100,7 +100,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, UserGraphsResponse]] + Response[Any | UserGraphsResponse] """ kwargs = _get_kwargs() @@ -115,7 +115,7 @@ def sync_detailed( def sync( *, client: AuthenticatedClient, -) -> Optional[Union[Any, UserGraphsResponse]]: +) -> Any | UserGraphsResponse | None: r"""Get User Graphs and Repositories List all graph databases and shared repositories accessible to the current user. @@ -165,7 +165,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, UserGraphsResponse] + Any | UserGraphsResponse """ return sync_detailed( @@ -176,7 +176,7 @@ def sync( async def asyncio_detailed( *, client: AuthenticatedClient, -) -> Response[Union[Any, UserGraphsResponse]]: +) -> Response[Any | UserGraphsResponse]: r"""Get User Graphs and Repositories List all graph databases and shared repositories accessible to the current user. @@ -226,7 +226,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, UserGraphsResponse]] + Response[Any | UserGraphsResponse] """ kwargs = _get_kwargs() @@ -239,7 +239,7 @@ async def asyncio_detailed( async def asyncio( *, client: AuthenticatedClient, -) -> Optional[Union[Any, UserGraphsResponse]]: +) -> Any | UserGraphsResponse | None: r"""Get User Graphs and Repositories List all graph databases and shared repositories accessible to the current user. @@ -289,7 +289,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, UserGraphsResponse] + Any | UserGraphsResponse """ return ( diff --git a/robosystems_client/api/graphs/select_graph.py b/robosystems_client/api/graphs/select_graph.py index 2b35685..6c8daac 100644 --- a/robosystems_client/api/graphs/select_graph.py +++ b/robosystems_client/api/graphs/select_graph.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: if response.status_code == 200: response_200 = SuccessResponse.from_dict(response.json()) @@ -57,8 +57,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -71,7 +71,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: """Select Graph Select a specific graph as the active workspace for the user. @@ -112,7 +112,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]] + Response[ErrorResponse | HTTPValidationError | SuccessResponse] """ kwargs = _get_kwargs( @@ -130,7 +130,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: """Select Graph Select a specific graph as the active workspace for the user. @@ -171,7 +171,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, HTTPValidationError, SuccessResponse] + ErrorResponse | HTTPValidationError | SuccessResponse """ return sync_detailed( @@ -184,7 +184,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: """Select Graph Select a specific graph as the active workspace for the user. @@ -225,7 +225,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]] + Response[ErrorResponse | HTTPValidationError | SuccessResponse] """ kwargs = _get_kwargs( @@ -241,7 +241,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: """Select Graph Select a specific graph as the active workspace for the user. @@ -282,7 +282,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, HTTPValidationError, SuccessResponse] + ErrorResponse | HTTPValidationError | SuccessResponse """ return ( diff --git a/robosystems_client/api/materialization/__init__.py b/robosystems_client/api/materialization/__init__.py new file mode 100644 index 0000000..2d7c0b2 --- /dev/null +++ b/robosystems_client/api/materialization/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/robosystems_client/api/materialization/get_materialization_status.py b/robosystems_client/api/materialization/get_materialization_status.py new file mode 100644 index 0000000..949e7ba --- /dev/null +++ b/robosystems_client/api/materialization/get_materialization_status.py @@ -0,0 +1,272 @@ +from http import HTTPStatus +from typing import Any, cast + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.error_response import ErrorResponse +from ...models.http_validation_error import HTTPValidationError +from ...models.materialize_status_response import MaterializeStatusResponse +from ...types import Response + + +def _get_kwargs( + graph_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": f"/v1/graphs/{graph_id}/materialize/status", + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | HTTPValidationError | MaterializeStatusResponse | None: + if response.status_code == 200: + response_200 = MaterializeStatusResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 401: + response_401 = cast(Any, None) + return response_401 + + if response.status_code == 403: + response_403 = ErrorResponse.from_dict(response.json()) + + return response_403 + + if response.status_code == 404: + response_404 = ErrorResponse.from_dict(response.json()) + + return response_404 + + if response.status_code == 422: + response_422 = 
HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ErrorResponse | HTTPValidationError | MaterializeStatusResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, +) -> Response[Any | ErrorResponse | HTTPValidationError | MaterializeStatusResponse]: + """Get Materialization Status + + Get current materialization status for the graph. + + Shows whether the graph is stale (DuckDB has changes not yet in graph database), + when it was last materialized, and how long since last materialization. + + **Status Information:** + - Whether graph is currently stale + - Reason for staleness if applicable + - When graph became stale + - When graph was last materialized + - Total materialization count + - Hours since last materialization + + **Use Cases:** + - Decide if materialization is needed + - Monitor graph freshness + - Track materialization history + - Understand data pipeline state + + **Important Notes:** + - Stale graph means DuckDB has changes not in graph + - Graph becomes stale after file deletions + - Materialization clears staleness + - Status retrieval is included - no credit consumption + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Any | ErrorResponse | HTTPValidationError | MaterializeStatusResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, +) -> Any | ErrorResponse | HTTPValidationError | MaterializeStatusResponse | None: + """Get Materialization Status + + Get current materialization status for the graph. + + Shows whether the graph is stale (DuckDB has changes not yet in graph database), + when it was last materialized, and how long since last materialization. + + **Status Information:** + - Whether graph is currently stale + - Reason for staleness if applicable + - When graph became stale + - When graph was last materialized + - Total materialization count + - Hours since last materialization + + **Use Cases:** + - Decide if materialization is needed + - Monitor graph freshness + - Track materialization history + - Understand data pipeline state + + **Important Notes:** + - Stale graph means DuckDB has changes not in graph + - Graph becomes stale after file deletions + - Materialization clears staleness + - Status retrieval is included - no credit consumption + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | ErrorResponse | HTTPValidationError | MaterializeStatusResponse + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, +) -> Response[Any | ErrorResponse | HTTPValidationError | MaterializeStatusResponse]: + """Get Materialization Status + + Get current materialization status for the graph. 
+ + Shows whether the graph is stale (DuckDB has changes not yet in graph database), + when it was last materialized, and how long since last materialization. + + **Status Information:** + - Whether graph is currently stale + - Reason for staleness if applicable + - When graph became stale + - When graph was last materialized + - Total materialization count + - Hours since last materialization + + **Use Cases:** + - Decide if materialization is needed + - Monitor graph freshness + - Track materialization history + - Understand data pipeline state + + **Important Notes:** + - Stale graph means DuckDB has changes not in graph + - Graph becomes stale after file deletions + - Materialization clears staleness + - Status retrieval is included - no credit consumption + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any | ErrorResponse | HTTPValidationError | MaterializeStatusResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, +) -> Any | ErrorResponse | HTTPValidationError | MaterializeStatusResponse | None: + """Get Materialization Status + + Get current materialization status for the graph. + + Shows whether the graph is stale (DuckDB has changes not yet in graph database), + when it was last materialized, and how long since last materialization. 
+ + **Status Information:** + - Whether graph is currently stale + - Reason for staleness if applicable + - When graph became stale + - When graph was last materialized + - Total materialization count + - Hours since last materialization + + **Use Cases:** + - Decide if materialization is needed + - Monitor graph freshness + - Track materialization history + - Understand data pipeline state + + **Important Notes:** + - Stale graph means DuckDB has changes not in graph + - Graph becomes stale after file deletions + - Materialization clears staleness + - Status retrieval is included - no credit consumption + + Args: + graph_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | ErrorResponse | HTTPValidationError | MaterializeStatusResponse + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + ) + ).parsed diff --git a/robosystems_client/api/materialization/materialize_graph.py b/robosystems_client/api/materialization/materialize_graph.py new file mode 100644 index 0000000..767636f --- /dev/null +++ b/robosystems_client/api/materialization/materialize_graph.py @@ -0,0 +1,416 @@ +from http import HTTPStatus +from typing import Any, cast + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.error_response import ErrorResponse +from ...models.http_validation_error import HTTPValidationError +from ...models.materialize_request import MaterializeRequest +from ...models.materialize_response import MaterializeResponse +from ...types import Response + + +def _get_kwargs( + graph_id: str, + *, + body: MaterializeRequest, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": f"/v1/graphs/{graph_id}/materialize", + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | HTTPValidationError | MaterializeResponse | None: + if response.status_code == 200: + response_200 = MaterializeResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 400: + response_400 = ErrorResponse.from_dict(response.json()) + + return response_400 + + if response.status_code == 401: + response_401 = cast(Any, None) + return response_401 + + if response.status_code == 403: + response_403 = ErrorResponse.from_dict(response.json()) + + return response_403 + + if response.status_code == 404: + response_404 = ErrorResponse.from_dict(response.json()) + + return response_404 + + if response.status_code == 409: + response_409 = ErrorResponse.from_dict(response.json()) + + return response_409 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if response.status_code == 500: + response_500 = cast(Any, None) + return response_500 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: 
httpx.Response +) -> Response[Any | ErrorResponse | HTTPValidationError | MaterializeResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: MaterializeRequest, +) -> Response[Any | ErrorResponse | HTTPValidationError | MaterializeResponse]: + """Materialize Graph from DuckDB + + Rebuild entire graph from DuckDB staging tables (materialized view pattern). + + This endpoint rebuilds the complete graph database from the current state of DuckDB + staging tables. It automatically discovers all tables, ingests them in the correct + order (nodes before relationships), and clears the staleness flag. + + **When to Use:** + - After batch uploads (files uploaded with ingest_to_graph=false) + - After cascade file deletions (graph marked stale) + - To ensure graph consistency with DuckDB state + - Periodic full refresh + + **What Happens:** + 1. Discovers all tables for the graph from PostgreSQL registry + 2. Sorts tables (nodes before relationships) + 3. Ingests all tables from DuckDB to graph in order + 4. Clears staleness flag on success + 5. Returns detailed materialization report + + **Staleness Check:** + By default, only materializes if graph is stale (after deletions or missed ingestions). + Use `force=true` to rebuild regardless of staleness. + + **Rebuild Feature:** + Setting `rebuild=true` regenerates the entire graph database from scratch: + - Deletes existing graph database + - Recreates with fresh schema from active GraphSchema + - Ingests all data files + - Safe operation - DuckDB is source of truth + - Useful for schema changes or data corrections + - Graph marked as 'rebuilding' during process + + **Table Ordering:** + Node tables (PascalCase) are ingested before relationship tables (UPPERCASE) to + ensure referential integrity. 
+ + **Error Handling:** + With `ignore_errors=true` (default), continues materializing even if individual + rows fail. Failed rows are logged but don't stop the process. + + **Concurrency Control:** + Only one materialization can run per graph at a time. If another materialization is in progress, + you'll receive a 409 Conflict error. The distributed lock automatically expires after + the configured TTL (default: 1 hour) to prevent deadlocks from failed materializations. + + **Performance:** + Full graph materialization can take minutes for large datasets. Consider running + during off-peak hours for production systems. + + **Credits:** + Materialization is included - no credit consumption + + Args: + graph_id (str): + body (MaterializeRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any | ErrorResponse | HTTPValidationError | MaterializeResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + body: MaterializeRequest, +) -> Any | ErrorResponse | HTTPValidationError | MaterializeResponse | None: + """Materialize Graph from DuckDB + + Rebuild entire graph from DuckDB staging tables (materialized view pattern). + + This endpoint rebuilds the complete graph database from the current state of DuckDB + staging tables. It automatically discovers all tables, ingests them in the correct + order (nodes before relationships), and clears the staleness flag. 
+ + **When to Use:** + - After batch uploads (files uploaded with ingest_to_graph=false) + - After cascade file deletions (graph marked stale) + - To ensure graph consistency with DuckDB state + - Periodic full refresh + + **What Happens:** + 1. Discovers all tables for the graph from PostgreSQL registry + 2. Sorts tables (nodes before relationships) + 3. Ingests all tables from DuckDB to graph in order + 4. Clears staleness flag on success + 5. Returns detailed materialization report + + **Staleness Check:** + By default, only materializes if graph is stale (after deletions or missed ingestions). + Use `force=true` to rebuild regardless of staleness. + + **Rebuild Feature:** + Setting `rebuild=true` regenerates the entire graph database from scratch: + - Deletes existing graph database + - Recreates with fresh schema from active GraphSchema + - Ingests all data files + - Safe operation - DuckDB is source of truth + - Useful for schema changes or data corrections + - Graph marked as 'rebuilding' during process + + **Table Ordering:** + Node tables (PascalCase) are ingested before relationship tables (UPPERCASE) to + ensure referential integrity. + + **Error Handling:** + With `ignore_errors=true` (default), continues materializing even if individual + rows fail. Failed rows are logged but don't stop the process. + + **Concurrency Control:** + Only one materialization can run per graph at a time. If another materialization is in progress, + you'll receive a 409 Conflict error. The distributed lock automatically expires after + the configured TTL (default: 1 hour) to prevent deadlocks from failed materializations. + + **Performance:** + Full graph materialization can take minutes for large datasets. Consider running + during off-peak hours for production systems. 
+ + **Credits:** + Materialization is included - no credit consumption + + Args: + graph_id (str): + body (MaterializeRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | ErrorResponse | HTTPValidationError | MaterializeResponse + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: MaterializeRequest, +) -> Response[Any | ErrorResponse | HTTPValidationError | MaterializeResponse]: + """Materialize Graph from DuckDB + + Rebuild entire graph from DuckDB staging tables (materialized view pattern). + + This endpoint rebuilds the complete graph database from the current state of DuckDB + staging tables. It automatically discovers all tables, ingests them in the correct + order (nodes before relationships), and clears the staleness flag. + + **When to Use:** + - After batch uploads (files uploaded with ingest_to_graph=false) + - After cascade file deletions (graph marked stale) + - To ensure graph consistency with DuckDB state + - Periodic full refresh + + **What Happens:** + 1. Discovers all tables for the graph from PostgreSQL registry + 2. Sorts tables (nodes before relationships) + 3. Ingests all tables from DuckDB to graph in order + 4. Clears staleness flag on success + 5. Returns detailed materialization report + + **Staleness Check:** + By default, only materializes if graph is stale (after deletions or missed ingestions). + Use `force=true` to rebuild regardless of staleness. 
+ + **Rebuild Feature:** + Setting `rebuild=true` regenerates the entire graph database from scratch: + - Deletes existing graph database + - Recreates with fresh schema from active GraphSchema + - Ingests all data files + - Safe operation - DuckDB is source of truth + - Useful for schema changes or data corrections + - Graph marked as 'rebuilding' during process + + **Table Ordering:** + Node tables (PascalCase) are ingested before relationship tables (UPPERCASE) to + ensure referential integrity. + + **Error Handling:** + With `ignore_errors=true` (default), continues materializing even if individual + rows fail. Failed rows are logged but don't stop the process. + + **Concurrency Control:** + Only one materialization can run per graph at a time. If another materialization is in progress, + you'll receive a 409 Conflict error. The distributed lock automatically expires after + the configured TTL (default: 1 hour) to prevent deadlocks from failed materializations. + + **Performance:** + Full graph materialization can take minutes for large datasets. Consider running + during off-peak hours for production systems. + + **Credits:** + Materialization is included - no credit consumption + + Args: + graph_id (str): + body (MaterializeRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Any | ErrorResponse | HTTPValidationError | MaterializeResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + body: MaterializeRequest, +) -> Any | ErrorResponse | HTTPValidationError | MaterializeResponse | None: + """Materialize Graph from DuckDB + + Rebuild entire graph from DuckDB staging tables (materialized view pattern). + + This endpoint rebuilds the complete graph database from the current state of DuckDB + staging tables. It automatically discovers all tables, ingests them in the correct + order (nodes before relationships), and clears the staleness flag. + + **When to Use:** + - After batch uploads (files uploaded with ingest_to_graph=false) + - After cascade file deletions (graph marked stale) + - To ensure graph consistency with DuckDB state + - Periodic full refresh + + **What Happens:** + 1. Discovers all tables for the graph from PostgreSQL registry + 2. Sorts tables (nodes before relationships) + 3. Ingests all tables from DuckDB to graph in order + 4. Clears staleness flag on success + 5. Returns detailed materialization report + + **Staleness Check:** + By default, only materializes if graph is stale (after deletions or missed ingestions). + Use `force=true` to rebuild regardless of staleness. + + **Rebuild Feature:** + Setting `rebuild=true` regenerates the entire graph database from scratch: + - Deletes existing graph database + - Recreates with fresh schema from active GraphSchema + - Ingests all data files + - Safe operation - DuckDB is source of truth + - Useful for schema changes or data corrections + - Graph marked as 'rebuilding' during process + + **Table Ordering:** + Node tables (PascalCase) are ingested before relationship tables (UPPERCASE) to + ensure referential integrity. 
+ + **Error Handling:** + With `ignore_errors=true` (default), continues materializing even if individual + rows fail. Failed rows are logged but don't stop the process. + + **Concurrency Control:** + Only one materialization can run per graph at a time. If another materialization is in progress, + you'll receive a 409 Conflict error. The distributed lock automatically expires after + the configured TTL (default: 1 hour) to prevent deadlocks from failed materializations. + + **Performance:** + Full graph materialization can take minutes for large datasets. Consider running + during off-peak hours for production systems. + + **Credits:** + Materialization is included - no credit consumption + + Args: + graph_id (str): + body (MaterializeRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | ErrorResponse | HTTPValidationError | MaterializeResponse + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + body=body, + ) + ).parsed diff --git a/robosystems_client/api/mcp/call_mcp_tool.py b/robosystems_client/api/mcp/call_mcp_tool.py index f1d8cd4..621308e 100644 --- a/robosystems_client/api/mcp/call_mcp_tool.py +++ b/robosystems_client/api/mcp/call_mcp_tool.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -15,14 +15,14 @@ def _get_kwargs( graph_id: str, *, body: MCPToolCall, - format_: Union[None, Unset, str] = UNSET, - test_mode: Union[Unset, bool] = False, + format_: None | str | Unset = UNSET, + test_mode: bool | Unset = False, ) -> dict[str, Any]: headers: dict[str, Any] = {} params: dict[str, Any] = {} - json_format_: Union[None, Unset, str] + json_format_: None | str | Unset if isinstance(format_, Unset): json_format_ = UNSET else: @@ -48,8 +48,8 @@ def 
_get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = response.json() return response_200 @@ -105,8 +105,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ErrorResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -120,9 +120,9 @@ def sync_detailed( *, client: AuthenticatedClient, body: MCPToolCall, - format_: Union[None, Unset, str] = UNSET, - test_mode: Union[Unset, bool] = False, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: + format_: None | str | Unset = UNSET, + test_mode: bool | Unset = False, +) -> Response[Any | ErrorResponse | HTTPValidationError]: """Execute MCP Tool Execute an MCP tool with intelligent response optimization. @@ -172,8 +172,8 @@ def sync_detailed( Args: graph_id (str): - format_ (Union[None, Unset, str]): Response format override (json, sse, ndjson) - test_mode (Union[Unset, bool]): Enable test mode for debugging Default: False. + format_ (None | str | Unset): Response format override (json, sse, ndjson) + test_mode (bool | Unset): Enable test mode for debugging Default: False. body (MCPToolCall): Request model for MCP tool execution. Raises: @@ -181,7 +181,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -203,9 +203,9 @@ def sync( *, client: AuthenticatedClient, body: MCPToolCall, - format_: Union[None, Unset, str] = UNSET, - test_mode: Union[Unset, bool] = False, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: + format_: None | str | Unset = UNSET, + test_mode: bool | Unset = False, +) -> Any | ErrorResponse | HTTPValidationError | None: """Execute MCP Tool Execute an MCP tool with intelligent response optimization. @@ -255,8 +255,8 @@ def sync( Args: graph_id (str): - format_ (Union[None, Unset, str]): Response format override (json, sse, ndjson) - test_mode (Union[Unset, bool]): Enable test mode for debugging Default: False. + format_ (None | str | Unset): Response format override (json, sse, ndjson) + test_mode (bool | Unset): Enable test mode for debugging Default: False. body (MCPToolCall): Request model for MCP tool execution. Raises: @@ -264,7 +264,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return sync_detailed( @@ -281,9 +281,9 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: MCPToolCall, - format_: Union[None, Unset, str] = UNSET, - test_mode: Union[Unset, bool] = False, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError]]: + format_: None | str | Unset = UNSET, + test_mode: bool | Unset = False, +) -> Response[Any | ErrorResponse | HTTPValidationError]: """Execute MCP Tool Execute an MCP tool with intelligent response optimization. @@ -333,8 +333,8 @@ async def asyncio_detailed( Args: graph_id (str): - format_ (Union[None, Unset, str]): Response format override (json, sse, ndjson) - test_mode (Union[Unset, bool]): Enable test mode for debugging Default: False. 
+ format_ (None | str | Unset): Response format override (json, sse, ndjson) + test_mode (bool | Unset): Enable test mode for debugging Default: False. body (MCPToolCall): Request model for MCP tool execution. Raises: @@ -342,7 +342,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError]] + Response[Any | ErrorResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -362,9 +362,9 @@ async def asyncio( *, client: AuthenticatedClient, body: MCPToolCall, - format_: Union[None, Unset, str] = UNSET, - test_mode: Union[Unset, bool] = False, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError]]: + format_: None | str | Unset = UNSET, + test_mode: bool | Unset = False, +) -> Any | ErrorResponse | HTTPValidationError | None: """Execute MCP Tool Execute an MCP tool with intelligent response optimization. @@ -414,8 +414,8 @@ async def asyncio( Args: graph_id (str): - format_ (Union[None, Unset, str]): Response format override (json, sse, ndjson) - test_mode (Union[Unset, bool]): Enable test mode for debugging Default: False. + format_ (None | str | Unset): Response format override (json, sse, ndjson) + test_mode (bool | Unset): Enable test mode for debugging Default: False. body (MCPToolCall): Request model for MCP tool execution. Raises: @@ -423,7 +423,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, ErrorResponse, HTTPValidationError] + Any | ErrorResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/mcp/list_mcp_tools.py b/robosystems_client/api/mcp/list_mcp_tools.py index 3a46ad7..57c07b8 100644 --- a/robosystems_client/api/mcp/list_mcp_tools.py +++ b/robosystems_client/api/mcp/list_mcp_tools.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, HTTPValidationError, MCPToolsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | HTTPValidationError | MCPToolsResponse | None: if response.status_code == 200: response_200 = MCPToolsResponse.from_dict(response.json()) @@ -52,8 +52,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, HTTPValidationError, MCPToolsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | HTTPValidationError | MCPToolsResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -66,7 +66,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, HTTPValidationError, MCPToolsResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | MCPToolsResponse]: """List MCP Tools Get available Model Context Protocol tools for graph analysis. @@ -99,7 +99,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, HTTPValidationError, MCPToolsResponse]] + Response[ErrorResponse | HTTPValidationError | MCPToolsResponse] """ kwargs = _get_kwargs( @@ -117,7 +117,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, HTTPValidationError, MCPToolsResponse]]: +) -> ErrorResponse | HTTPValidationError | MCPToolsResponse | None: """List MCP Tools Get available Model Context Protocol tools for graph analysis. @@ -150,7 +150,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, HTTPValidationError, MCPToolsResponse] + ErrorResponse | HTTPValidationError | MCPToolsResponse """ return sync_detailed( @@ -163,7 +163,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, HTTPValidationError, MCPToolsResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | MCPToolsResponse]: """List MCP Tools Get available Model Context Protocol tools for graph analysis. @@ -196,7 +196,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, MCPToolsResponse]] + Response[ErrorResponse | HTTPValidationError | MCPToolsResponse] """ kwargs = _get_kwargs( @@ -212,7 +212,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, HTTPValidationError, MCPToolsResponse]]: +) -> ErrorResponse | HTTPValidationError | MCPToolsResponse | None: """List MCP Tools Get available Model Context Protocol tools for graph analysis. @@ -245,7 +245,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, HTTPValidationError, MCPToolsResponse] + ErrorResponse | HTTPValidationError | MCPToolsResponse """ return ( diff --git a/robosystems_client/api/operations/cancel_operation.py b/robosystems_client/api/operations/cancel_operation.py index f152e96..7a6aa96 100644 --- a/robosystems_client/api/operations/cancel_operation.py +++ b/robosystems_client/api/operations/cancel_operation.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -24,8 +24,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, CancelOperationResponseCanceloperation, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | CancelOperationResponseCanceloperation | HTTPValidationError | None: if response.status_code == 200: response_200 = CancelOperationResponseCanceloperation.from_dict(response.json()) @@ -59,8 +59,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, CancelOperationResponseCanceloperation, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | CancelOperationResponseCanceloperation | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -73,7 +73,7 @@ def sync_detailed( operation_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, CancelOperationResponseCanceloperation, HTTPValidationError]]: +) -> Response[Any | CancelOperationResponseCanceloperation | HTTPValidationError]: """Cancel Operation Cancel a pending or running operation. @@ -94,7 +94,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, CancelOperationResponseCanceloperation, HTTPValidationError]] + Response[Any | CancelOperationResponseCanceloperation | HTTPValidationError] """ kwargs = _get_kwargs( @@ -112,7 +112,7 @@ def sync( operation_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, CancelOperationResponseCanceloperation, HTTPValidationError]]: +) -> Any | CancelOperationResponseCanceloperation | HTTPValidationError | None: """Cancel Operation Cancel a pending or running operation. @@ -133,7 +133,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, CancelOperationResponseCanceloperation, HTTPValidationError] + Any | CancelOperationResponseCanceloperation | HTTPValidationError """ return sync_detailed( @@ -146,7 +146,7 @@ async def asyncio_detailed( operation_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, CancelOperationResponseCanceloperation, HTTPValidationError]]: +) -> Response[Any | CancelOperationResponseCanceloperation | HTTPValidationError]: """Cancel Operation Cancel a pending or running operation. @@ -167,7 +167,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, CancelOperationResponseCanceloperation, HTTPValidationError]] + Response[Any | CancelOperationResponseCanceloperation | HTTPValidationError] """ kwargs = _get_kwargs( @@ -183,7 +183,7 @@ async def asyncio( operation_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, CancelOperationResponseCanceloperation, HTTPValidationError]]: +) -> Any | CancelOperationResponseCanceloperation | HTTPValidationError | None: """Cancel Operation Cancel a pending or running operation. @@ -204,7 +204,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, CancelOperationResponseCanceloperation, HTTPValidationError] + Any | CancelOperationResponseCanceloperation | HTTPValidationError """ return ( diff --git a/robosystems_client/api/operations/get_operation_status.py b/robosystems_client/api/operations/get_operation_status.py index 116d66c..552a3fc 100644 --- a/robosystems_client/api/operations/get_operation_status.py +++ b/robosystems_client/api/operations/get_operation_status.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -24,10 +24,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[ - Union[Any, GetOperationStatusResponseGetoperationstatus, HTTPValidationError] -]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | GetOperationStatusResponseGetoperationstatus | HTTPValidationError | None: if response.status_code == 200: response_200 = GetOperationStatusResponseGetoperationstatus.from_dict( response.json() @@ -59,10 +57,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[ - Union[Any, GetOperationStatusResponseGetoperationstatus, HTTPValidationError] -]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | GetOperationStatusResponseGetoperationstatus | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -75,9 +71,7 @@ def sync_detailed( operation_id: str, *, client: AuthenticatedClient, -) -> Response[ - Union[Any, GetOperationStatusResponseGetoperationstatus, HTTPValidationError] -]: +) -> Response[Any | GetOperationStatusResponseGetoperationstatus | HTTPValidationError]: """Get Operation Status Get current status and metadata for an operation. 
@@ -102,7 +96,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, GetOperationStatusResponseGetoperationstatus, HTTPValidationError]] + Response[Any | GetOperationStatusResponseGetoperationstatus | HTTPValidationError] """ kwargs = _get_kwargs( @@ -120,9 +114,7 @@ def sync( operation_id: str, *, client: AuthenticatedClient, -) -> Optional[ - Union[Any, GetOperationStatusResponseGetoperationstatus, HTTPValidationError] -]: +) -> Any | GetOperationStatusResponseGetoperationstatus | HTTPValidationError | None: """Get Operation Status Get current status and metadata for an operation. @@ -147,7 +139,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, GetOperationStatusResponseGetoperationstatus, HTTPValidationError] + Any | GetOperationStatusResponseGetoperationstatus | HTTPValidationError """ return sync_detailed( @@ -160,9 +152,7 @@ async def asyncio_detailed( operation_id: str, *, client: AuthenticatedClient, -) -> Response[ - Union[Any, GetOperationStatusResponseGetoperationstatus, HTTPValidationError] -]: +) -> Response[Any | GetOperationStatusResponseGetoperationstatus | HTTPValidationError]: """Get Operation Status Get current status and metadata for an operation. @@ -187,7 +177,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, GetOperationStatusResponseGetoperationstatus, HTTPValidationError]] + Response[Any | GetOperationStatusResponseGetoperationstatus | HTTPValidationError] """ kwargs = _get_kwargs( @@ -203,9 +193,7 @@ async def asyncio( operation_id: str, *, client: AuthenticatedClient, -) -> Optional[ - Union[Any, GetOperationStatusResponseGetoperationstatus, HTTPValidationError] -]: +) -> Any | GetOperationStatusResponseGetoperationstatus | HTTPValidationError | None: """Get Operation Status Get current status and metadata for an operation. @@ -230,7 +218,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, GetOperationStatusResponseGetoperationstatus, HTTPValidationError] + Any | GetOperationStatusResponseGetoperationstatus | HTTPValidationError """ return ( diff --git a/robosystems_client/api/operations/stream_operation_events.py b/robosystems_client/api/operations/stream_operation_events.py index 44eff20..204719e 100644 --- a/robosystems_client/api/operations/stream_operation_events.py +++ b/robosystems_client/api/operations/stream_operation_events.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -12,9 +12,9 @@ def _get_kwargs( operation_id: str, *, - from_sequence: Union[Unset, int] = 0, - token: Union[None, Unset, str] = UNSET, - authorization: Union[None, Unset, str] = UNSET, + from_sequence: int | Unset = 0, + token: None | str | Unset = UNSET, + authorization: None | str | Unset = UNSET, ) -> dict[str, Any]: headers: dict[str, Any] = {} if not isinstance(authorization, Unset): @@ -24,7 +24,7 @@ def _get_kwargs( params["from_sequence"] = from_sequence - json_token: Union[None, Unset, str] + json_token: None | str | Unset if isinstance(token, Unset): json_token = UNSET else: @@ -44,8 +44,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: 
httpx.Response -) -> Optional[Union[Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | None: if response.status_code == 200: response_200 = response.json() return response_200 @@ -74,8 +74,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -88,10 +88,10 @@ def sync_detailed( operation_id: str, *, client: AuthenticatedClient, - from_sequence: Union[Unset, int] = 0, - token: Union[None, Unset, str] = UNSET, - authorization: Union[None, Unset, str] = UNSET, -) -> Response[Union[Any, HTTPValidationError]]: + from_sequence: int | Unset = 0, + token: None | str | Unset = UNSET, + authorization: None | str | Unset = UNSET, +) -> Response[Any | HTTPValidationError]: """Stream Operation Events Stream real-time events for an operation using Server-Sent Events (SSE). @@ -144,17 +144,17 @@ def sync_detailed( Args: operation_id (str): Operation identifier from initial submission - from_sequence (Union[Unset, int]): Start streaming from this sequence number (0 = from + from_sequence (int | Unset): Start streaming from this sequence number (0 = from beginning) Default: 0. - token (Union[None, Unset, str]): JWT token for SSE authentication - authorization (Union[None, Unset, str]): + token (None | str | Unset): JWT token for SSE authentication + authorization (None | str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -175,10 +175,10 @@ def sync( operation_id: str, *, client: AuthenticatedClient, - from_sequence: Union[Unset, int] = 0, - token: Union[None, Unset, str] = UNSET, - authorization: Union[None, Unset, str] = UNSET, -) -> Optional[Union[Any, HTTPValidationError]]: + from_sequence: int | Unset = 0, + token: None | str | Unset = UNSET, + authorization: None | str | Unset = UNSET, +) -> Any | HTTPValidationError | None: """Stream Operation Events Stream real-time events for an operation using Server-Sent Events (SSE). @@ -231,17 +231,17 @@ def sync( Args: operation_id (str): Operation identifier from initial submission - from_sequence (Union[Unset, int]): Start streaming from this sequence number (0 = from + from_sequence (int | Unset): Start streaming from this sequence number (0 = from beginning) Default: 0. - token (Union[None, Unset, str]): JWT token for SSE authentication - authorization (Union[None, Unset, str]): + token (None | str | Unset): JWT token for SSE authentication + authorization (None | str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return sync_detailed( @@ -257,10 +257,10 @@ async def asyncio_detailed( operation_id: str, *, client: AuthenticatedClient, - from_sequence: Union[Unset, int] = 0, - token: Union[None, Unset, str] = UNSET, - authorization: Union[None, Unset, str] = UNSET, -) -> Response[Union[Any, HTTPValidationError]]: + from_sequence: int | Unset = 0, + token: None | str | Unset = UNSET, + authorization: None | str | Unset = UNSET, +) -> Response[Any | HTTPValidationError]: """Stream Operation Events Stream real-time events for an operation using Server-Sent Events (SSE). 
@@ -313,17 +313,17 @@ async def asyncio_detailed( Args: operation_id (str): Operation identifier from initial submission - from_sequence (Union[Unset, int]): Start streaming from this sequence number (0 = from + from_sequence (int | Unset): Start streaming from this sequence number (0 = from beginning) Default: 0. - token (Union[None, Unset, str]): JWT token for SSE authentication - authorization (Union[None, Unset, str]): + token (None | str | Unset): JWT token for SSE authentication + authorization (None | str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -342,10 +342,10 @@ async def asyncio( operation_id: str, *, client: AuthenticatedClient, - from_sequence: Union[Unset, int] = 0, - token: Union[None, Unset, str] = UNSET, - authorization: Union[None, Unset, str] = UNSET, -) -> Optional[Union[Any, HTTPValidationError]]: + from_sequence: int | Unset = 0, + token: None | str | Unset = UNSET, + authorization: None | str | Unset = UNSET, +) -> Any | HTTPValidationError | None: """Stream Operation Events Stream real-time events for an operation using Server-Sent Events (SSE). @@ -398,17 +398,17 @@ async def asyncio( Args: operation_id (str): Operation identifier from initial submission - from_sequence (Union[Unset, int]): Start streaming from this sequence number (0 = from + from_sequence (int | Unset): Start streaming from this sequence number (0 = from beginning) Default: 0. 
- token (Union[None, Unset, str]): JWT token for SSE authentication - authorization (Union[None, Unset, str]): + token (None | str | Unset): JWT token for SSE authentication + authorization (None | str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return ( diff --git a/robosystems_client/api/org/create_org.py b/robosystems_client/api/org/create_org.py index 81531e1..f26c8f9 100644 --- a/robosystems_client/api/org/create_org.py +++ b/robosystems_client/api/org/create_org.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -31,8 +31,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, OrgDetailResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | OrgDetailResponse | None: if response.status_code == 201: response_201 = OrgDetailResponse.from_dict(response.json()) @@ -50,8 +50,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, OrgDetailResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | OrgDetailResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -64,7 +64,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: CreateOrgRequest, -) -> Response[Union[HTTPValidationError, OrgDetailResponse]]: +) -> Response[HTTPValidationError | OrgDetailResponse]: """Create Organization Create a new organization. The creating user becomes the owner. 
@@ -77,7 +77,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OrgDetailResponse]] + Response[HTTPValidationError | OrgDetailResponse] """ kwargs = _get_kwargs( @@ -95,7 +95,7 @@ def sync( *, client: AuthenticatedClient, body: CreateOrgRequest, -) -> Optional[Union[HTTPValidationError, OrgDetailResponse]]: +) -> HTTPValidationError | OrgDetailResponse | None: """Create Organization Create a new organization. The creating user becomes the owner. @@ -108,7 +108,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, OrgDetailResponse] + HTTPValidationError | OrgDetailResponse """ return sync_detailed( @@ -121,7 +121,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: CreateOrgRequest, -) -> Response[Union[HTTPValidationError, OrgDetailResponse]]: +) -> Response[HTTPValidationError | OrgDetailResponse]: """Create Organization Create a new organization. The creating user becomes the owner. @@ -134,7 +134,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OrgDetailResponse]] + Response[HTTPValidationError | OrgDetailResponse] """ kwargs = _get_kwargs( @@ -150,7 +150,7 @@ async def asyncio( *, client: AuthenticatedClient, body: CreateOrgRequest, -) -> Optional[Union[HTTPValidationError, OrgDetailResponse]]: +) -> HTTPValidationError | OrgDetailResponse | None: """Create Organization Create a new organization. The creating user becomes the owner. @@ -163,7 +163,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, OrgDetailResponse] + HTTPValidationError | OrgDetailResponse """ return ( diff --git a/robosystems_client/api/org/get_org.py b/robosystems_client/api/org/get_org.py index 1b7cfc3..8120817 100644 --- a/robosystems_client/api/org/get_org.py +++ b/robosystems_client/api/org/get_org.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, OrgDetailResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | OrgDetailResponse | None: if response.status_code == 200: response_200 = OrgDetailResponse.from_dict(response.json()) @@ -41,8 +41,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, OrgDetailResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | OrgDetailResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -55,7 +55,7 @@ def sync_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, OrgDetailResponse]]: +) -> Response[HTTPValidationError | OrgDetailResponse]: """Get Organization Get detailed information about an organization. @@ -68,7 +68,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, OrgDetailResponse]] + Response[HTTPValidationError | OrgDetailResponse] """ kwargs = _get_kwargs( @@ -86,7 +86,7 @@ def sync( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, OrgDetailResponse]]: +) -> HTTPValidationError | OrgDetailResponse | None: """Get Organization Get detailed information about an organization. @@ -99,7 +99,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, OrgDetailResponse] + HTTPValidationError | OrgDetailResponse """ return sync_detailed( @@ -112,7 +112,7 @@ async def asyncio_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, OrgDetailResponse]]: +) -> Response[HTTPValidationError | OrgDetailResponse]: """Get Organization Get detailed information about an organization. @@ -125,7 +125,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OrgDetailResponse]] + Response[HTTPValidationError | OrgDetailResponse] """ kwargs = _get_kwargs( @@ -141,7 +141,7 @@ async def asyncio( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, OrgDetailResponse]]: +) -> HTTPValidationError | OrgDetailResponse | None: """Get Organization Get detailed information about an organization. @@ -154,7 +154,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, OrgDetailResponse] + HTTPValidationError | OrgDetailResponse """ return ( diff --git a/robosystems_client/api/org/list_org_graphs.py b/robosystems_client/api/org/list_org_graphs.py index 5797dfa..23a2de4 100644 --- a/robosystems_client/api/org/list_org_graphs.py +++ b/robosystems_client/api/org/list_org_graphs.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, list["ListOrgGraphsResponse200Item"]]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | list[ListOrgGraphsResponse200Item] | None: if response.status_code == 200: response_200 = [] _response_200 = response.json() @@ -46,8 +46,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, list["ListOrgGraphsResponse200Item"]]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | list[ListOrgGraphsResponse200Item]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -60,7 +60,7 @@ def sync_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, list["ListOrgGraphsResponse200Item"]]]: +) -> Response[HTTPValidationError | list[ListOrgGraphsResponse200Item]]: """List Organization Graphs Get all graphs belonging to an organization. @@ -73,7 +73,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, list['ListOrgGraphsResponse200Item']]] + Response[HTTPValidationError | list[ListOrgGraphsResponse200Item]] """ kwargs = _get_kwargs( @@ -91,7 +91,7 @@ def sync( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, list["ListOrgGraphsResponse200Item"]]]: +) -> HTTPValidationError | list[ListOrgGraphsResponse200Item] | None: """List Organization Graphs Get all graphs belonging to an organization. @@ -104,7 +104,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, list['ListOrgGraphsResponse200Item']] + HTTPValidationError | list[ListOrgGraphsResponse200Item] """ return sync_detailed( @@ -117,7 +117,7 @@ async def asyncio_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, list["ListOrgGraphsResponse200Item"]]]: +) -> Response[HTTPValidationError | list[ListOrgGraphsResponse200Item]]: """List Organization Graphs Get all graphs belonging to an organization. @@ -130,7 +130,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, list['ListOrgGraphsResponse200Item']]] + Response[HTTPValidationError | list[ListOrgGraphsResponse200Item]] """ kwargs = _get_kwargs( @@ -146,7 +146,7 @@ async def asyncio( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, list["ListOrgGraphsResponse200Item"]]]: +) -> HTTPValidationError | list[ListOrgGraphsResponse200Item] | None: """List Organization Graphs Get all graphs belonging to an organization. @@ -159,7 +159,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, list['ListOrgGraphsResponse200Item']] + HTTPValidationError | list[ListOrgGraphsResponse200Item] """ return ( diff --git a/robosystems_client/api/org/list_user_orgs.py b/robosystems_client/api/org/list_user_orgs.py index b6de99e..4f2b8bc 100644 --- a/robosystems_client/api/org/list_user_orgs.py +++ b/robosystems_client/api/org/list_user_orgs.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -19,8 +19,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[OrgListResponse]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> OrgListResponse | None: if response.status_code == 200: response_200 = OrgListResponse.from_dict(response.json()) @@ -33,7 +33,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[OrgListResponse]: return Response( status_code=HTTPStatus(response.status_code), @@ -71,7 +71,7 @@ def sync_detailed( def sync( *, client: AuthenticatedClient, -) -> Optional[OrgListResponse]: +) -> OrgListResponse | None: """List User's Organizations Get all organizations the current user belongs to, with their role in each. @@ -115,7 +115,7 @@ async def asyncio_detailed( async def asyncio( *, client: AuthenticatedClient, -) -> Optional[OrgListResponse]: +) -> OrgListResponse | None: """List User's Organizations Get all organizations the current user belongs to, with their role in each. 
diff --git a/robosystems_client/api/org/update_org.py b/robosystems_client/api/org/update_org.py index ab8871f..636f62f 100644 --- a/robosystems_client/api/org/update_org.py +++ b/robosystems_client/api/org/update_org.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, OrgDetailResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | OrgDetailResponse | None: if response.status_code == 200: response_200 = OrgDetailResponse.from_dict(response.json()) @@ -51,8 +51,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, OrgDetailResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | OrgDetailResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -66,7 +66,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: UpdateOrgRequest, -) -> Response[Union[HTTPValidationError, OrgDetailResponse]]: +) -> Response[HTTPValidationError | OrgDetailResponse]: """Update Organization Update organization information. Requires admin or owner role. @@ -80,7 +80,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OrgDetailResponse]] + Response[HTTPValidationError | OrgDetailResponse] """ kwargs = _get_kwargs( @@ -100,7 +100,7 @@ def sync( *, client: AuthenticatedClient, body: UpdateOrgRequest, -) -> Optional[Union[HTTPValidationError, OrgDetailResponse]]: +) -> HTTPValidationError | OrgDetailResponse | None: """Update Organization Update organization information. 
Requires admin or owner role. @@ -114,7 +114,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, OrgDetailResponse] + HTTPValidationError | OrgDetailResponse """ return sync_detailed( @@ -129,7 +129,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: UpdateOrgRequest, -) -> Response[Union[HTTPValidationError, OrgDetailResponse]]: +) -> Response[HTTPValidationError | OrgDetailResponse]: """Update Organization Update organization information. Requires admin or owner role. @@ -143,7 +143,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OrgDetailResponse]] + Response[HTTPValidationError | OrgDetailResponse] """ kwargs = _get_kwargs( @@ -161,7 +161,7 @@ async def asyncio( *, client: AuthenticatedClient, body: UpdateOrgRequest, -) -> Optional[Union[HTTPValidationError, OrgDetailResponse]]: +) -> HTTPValidationError | OrgDetailResponse | None: """Update Organization Update organization information. Requires admin or owner role. @@ -175,7 +175,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, OrgDetailResponse] + HTTPValidationError | OrgDetailResponse """ return ( diff --git a/robosystems_client/api/org_members/invite_org_member.py b/robosystems_client/api/org_members/invite_org_member.py index 14fe3a5..b4e457a 100644 --- a/robosystems_client/api/org_members/invite_org_member.py +++ b/robosystems_client/api/org_members/invite_org_member.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, OrgMemberResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | OrgMemberResponse | None: if response.status_code == 201: response_201 = OrgMemberResponse.from_dict(response.json()) @@ -51,8 +51,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, OrgMemberResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | OrgMemberResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -66,7 +66,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: InviteMemberRequest, -) -> Response[Union[HTTPValidationError, OrgMemberResponse]]: +) -> Response[HTTPValidationError | OrgMemberResponse]: """Invite Member Invite a user to join the organization. Requires admin or owner role. @@ -85,7 +85,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, OrgMemberResponse]] + Response[HTTPValidationError | OrgMemberResponse] """ kwargs = _get_kwargs( @@ -105,7 +105,7 @@ def sync( *, client: AuthenticatedClient, body: InviteMemberRequest, -) -> Optional[Union[HTTPValidationError, OrgMemberResponse]]: +) -> HTTPValidationError | OrgMemberResponse | None: """Invite Member Invite a user to join the organization. Requires admin or owner role. @@ -124,7 +124,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, OrgMemberResponse] + HTTPValidationError | OrgMemberResponse """ return sync_detailed( @@ -139,7 +139,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: InviteMemberRequest, -) -> Response[Union[HTTPValidationError, OrgMemberResponse]]: +) -> Response[HTTPValidationError | OrgMemberResponse]: """Invite Member Invite a user to join the organization. Requires admin or owner role. @@ -158,7 +158,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OrgMemberResponse]] + Response[HTTPValidationError | OrgMemberResponse] """ kwargs = _get_kwargs( @@ -176,7 +176,7 @@ async def asyncio( *, client: AuthenticatedClient, body: InviteMemberRequest, -) -> Optional[Union[HTTPValidationError, OrgMemberResponse]]: +) -> HTTPValidationError | OrgMemberResponse | None: """Invite Member Invite a user to join the organization. Requires admin or owner role. @@ -195,7 +195,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, OrgMemberResponse] + HTTPValidationError | OrgMemberResponse """ return ( diff --git a/robosystems_client/api/org_members/list_org_members.py b/robosystems_client/api/org_members/list_org_members.py index 4e7562d..35bb4af 100644 --- a/robosystems_client/api/org_members/list_org_members.py +++ b/robosystems_client/api/org_members/list_org_members.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, OrgMemberListResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | OrgMemberListResponse | None: if response.status_code == 200: response_200 = OrgMemberListResponse.from_dict(response.json()) @@ -41,8 +41,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, OrgMemberListResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | OrgMemberListResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -55,7 +55,7 @@ def sync_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, OrgMemberListResponse]]: +) -> Response[HTTPValidationError | OrgMemberListResponse]: """List Organization Members Get all members of an organization with their roles. @@ -68,7 +68,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, OrgMemberListResponse]] + Response[HTTPValidationError | OrgMemberListResponse] """ kwargs = _get_kwargs( @@ -86,7 +86,7 @@ def sync( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, OrgMemberListResponse]]: +) -> HTTPValidationError | OrgMemberListResponse | None: """List Organization Members Get all members of an organization with their roles. @@ -99,7 +99,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, OrgMemberListResponse] + HTTPValidationError | OrgMemberListResponse """ return sync_detailed( @@ -112,7 +112,7 @@ async def asyncio_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, OrgMemberListResponse]]: +) -> Response[HTTPValidationError | OrgMemberListResponse]: """List Organization Members Get all members of an organization with their roles. @@ -125,7 +125,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OrgMemberListResponse]] + Response[HTTPValidationError | OrgMemberListResponse] """ kwargs = _get_kwargs( @@ -141,7 +141,7 @@ async def asyncio( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, OrgMemberListResponse]]: +) -> HTTPValidationError | OrgMemberListResponse | None: """List Organization Members Get all members of an organization with their roles. @@ -154,7 +154,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, OrgMemberListResponse] + HTTPValidationError | OrgMemberListResponse """ return ( diff --git a/robosystems_client/api/org_members/remove_org_member.py b/robosystems_client/api/org_members/remove_org_member.py index 7053289..54b5e03 100644 --- a/robosystems_client/api/org_members/remove_org_member.py +++ b/robosystems_client/api/org_members/remove_org_member.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | None: if response.status_code == 204: response_204 = cast(Any, None) return response_204 @@ -40,8 +40,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -55,7 +55,7 @@ def sync_detailed( user_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, HTTPValidationError]]: +) -> Response[Any | HTTPValidationError]: """Remove Member Remove a member from the organization. Requires admin or owner role. @@ -69,7 +69,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -89,7 +89,7 @@ def sync( user_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, HTTPValidationError]]: +) -> Any | HTTPValidationError | None: """Remove Member Remove a member from the organization. 
Requires admin or owner role. @@ -103,7 +103,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return sync_detailed( @@ -118,7 +118,7 @@ async def asyncio_detailed( user_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, HTTPValidationError]]: +) -> Response[Any | HTTPValidationError]: """Remove Member Remove a member from the organization. Requires admin or owner role. @@ -132,7 +132,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -150,7 +150,7 @@ async def asyncio( user_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, HTTPValidationError]]: +) -> Any | HTTPValidationError | None: """Remove Member Remove a member from the organization. Requires admin or owner role. @@ -164,7 +164,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return ( diff --git a/robosystems_client/api/org_members/update_org_member_role.py b/robosystems_client/api/org_members/update_org_member_role.py index f390c91..65bb2bb 100644 --- a/robosystems_client/api/org_members/update_org_member_role.py +++ b/robosystems_client/api/org_members/update_org_member_role.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -33,8 +33,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, OrgMemberResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | OrgMemberResponse | None: if response.status_code == 200: response_200 = OrgMemberResponse.from_dict(response.json()) @@ -52,8 +52,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, OrgMemberResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | OrgMemberResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -68,7 +68,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: UpdateMemberRoleRequest, -) -> Response[Union[HTTPValidationError, OrgMemberResponse]]: +) -> Response[HTTPValidationError | OrgMemberResponse]: """Update Member Role Update a member's role in the organization. Requires admin or owner role. @@ -83,7 +83,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, OrgMemberResponse]] + Response[HTTPValidationError | OrgMemberResponse] """ kwargs = _get_kwargs( @@ -105,7 +105,7 @@ def sync( *, client: AuthenticatedClient, body: UpdateMemberRoleRequest, -) -> Optional[Union[HTTPValidationError, OrgMemberResponse]]: +) -> HTTPValidationError | OrgMemberResponse | None: """Update Member Role Update a member's role in the organization. Requires admin or owner role. @@ -120,7 +120,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, OrgMemberResponse] + HTTPValidationError | OrgMemberResponse """ return sync_detailed( @@ -137,7 +137,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: UpdateMemberRoleRequest, -) -> Response[Union[HTTPValidationError, OrgMemberResponse]]: +) -> Response[HTTPValidationError | OrgMemberResponse]: """Update Member Role Update a member's role in the organization. Requires admin or owner role. @@ -152,7 +152,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OrgMemberResponse]] + Response[HTTPValidationError | OrgMemberResponse] """ kwargs = _get_kwargs( @@ -172,7 +172,7 @@ async def asyncio( *, client: AuthenticatedClient, body: UpdateMemberRoleRequest, -) -> Optional[Union[HTTPValidationError, OrgMemberResponse]]: +) -> HTTPValidationError | OrgMemberResponse | None: """Update Member Role Update a member's role in the organization. Requires admin or owner role. @@ -187,7 +187,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, OrgMemberResponse] + HTTPValidationError | OrgMemberResponse """ return ( diff --git a/robosystems_client/api/org_usage/get_org_limits.py b/robosystems_client/api/org_usage/get_org_limits.py index 161f9a2..cbb0e2c 100644 --- a/robosystems_client/api/org_usage/get_org_limits.py +++ b/robosystems_client/api/org_usage/get_org_limits.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, OrgLimitsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | OrgLimitsResponse | None: if response.status_code == 200: response_200 = OrgLimitsResponse.from_dict(response.json()) @@ -41,8 +41,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, OrgLimitsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | OrgLimitsResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -55,7 +55,7 @@ def sync_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, OrgLimitsResponse]]: +) -> Response[HTTPValidationError | OrgLimitsResponse]: """Get Organization Limits Get the current limits and quotas for an organization. @@ -68,7 +68,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, OrgLimitsResponse]] + Response[HTTPValidationError | OrgLimitsResponse] """ kwargs = _get_kwargs( @@ -86,7 +86,7 @@ def sync( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, OrgLimitsResponse]]: +) -> HTTPValidationError | OrgLimitsResponse | None: """Get Organization Limits Get the current limits and quotas for an organization. @@ -99,7 +99,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, OrgLimitsResponse] + HTTPValidationError | OrgLimitsResponse """ return sync_detailed( @@ -112,7 +112,7 @@ async def asyncio_detailed( org_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, OrgLimitsResponse]]: +) -> Response[HTTPValidationError | OrgLimitsResponse]: """Get Organization Limits Get the current limits and quotas for an organization. @@ -125,7 +125,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OrgLimitsResponse]] + Response[HTTPValidationError | OrgLimitsResponse] """ kwargs = _get_kwargs( @@ -141,7 +141,7 @@ async def asyncio( org_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, OrgLimitsResponse]]: +) -> HTTPValidationError | OrgLimitsResponse | None: """Get Organization Limits Get the current limits and quotas for an organization. @@ -154,7 +154,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, OrgLimitsResponse] + HTTPValidationError | OrgLimitsResponse """ return ( diff --git a/robosystems_client/api/org_usage/get_org_usage.py b/robosystems_client/api/org_usage/get_org_usage.py index 05384ca..c9d0956 100644 --- a/robosystems_client/api/org_usage/get_org_usage.py +++ b/robosystems_client/api/org_usage/get_org_usage.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -13,7 +13,7 @@ def _get_kwargs( org_id: str, *, - days: Union[Unset, int] = 30, + days: int | Unset = 30, ) -> dict[str, Any]: params: dict[str, Any] = {} @@ -31,8 +31,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, OrgUsageResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | OrgUsageResponse | None: if response.status_code == 200: response_200 = OrgUsageResponse.from_dict(response.json()) @@ -50,8 +50,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, OrgUsageResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | OrgUsageResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -64,22 +64,22 @@ def sync_detailed( org_id: str, *, client: AuthenticatedClient, - days: Union[Unset, int] = 30, -) -> Response[Union[HTTPValidationError, OrgUsageResponse]]: + days: int | Unset = 30, +) -> Response[HTTPValidationError | OrgUsageResponse]: """Get Organization Usage Get detailed usage statistics for an organization aggregated across all graphs. Args: org_id (str): - days (Union[Unset, int]): Default: 30. + days (int | Unset): Default: 30. 
Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, OrgUsageResponse]] + Response[HTTPValidationError | OrgUsageResponse] """ kwargs = _get_kwargs( @@ -98,22 +98,22 @@ def sync( org_id: str, *, client: AuthenticatedClient, - days: Union[Unset, int] = 30, -) -> Optional[Union[HTTPValidationError, OrgUsageResponse]]: + days: int | Unset = 30, +) -> HTTPValidationError | OrgUsageResponse | None: """Get Organization Usage Get detailed usage statistics for an organization aggregated across all graphs. Args: org_id (str): - days (Union[Unset, int]): Default: 30. + days (int | Unset): Default: 30. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, OrgUsageResponse] + HTTPValidationError | OrgUsageResponse """ return sync_detailed( @@ -127,22 +127,22 @@ async def asyncio_detailed( org_id: str, *, client: AuthenticatedClient, - days: Union[Unset, int] = 30, -) -> Response[Union[HTTPValidationError, OrgUsageResponse]]: + days: int | Unset = 30, +) -> Response[HTTPValidationError | OrgUsageResponse]: """Get Organization Usage Get detailed usage statistics for an organization aggregated across all graphs. Args: org_id (str): - days (Union[Unset, int]): Default: 30. + days (int | Unset): Default: 30. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, OrgUsageResponse]] + Response[HTTPValidationError | OrgUsageResponse] """ kwargs = _get_kwargs( @@ -159,22 +159,22 @@ async def asyncio( org_id: str, *, client: AuthenticatedClient, - days: Union[Unset, int] = 30, -) -> Optional[Union[HTTPValidationError, OrgUsageResponse]]: + days: int | Unset = 30, +) -> HTTPValidationError | OrgUsageResponse | None: """Get Organization Usage Get detailed usage statistics for an organization aggregated across all graphs. Args: org_id (str): - days (Union[Unset, int]): Default: 30. + days (int | Unset): Default: 30. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, OrgUsageResponse] + HTTPValidationError | OrgUsageResponse """ return ( diff --git a/robosystems_client/api/query/execute_cypher_query.py b/robosystems_client/api/query/execute_cypher_query.py index 1f79781..f727af0 100644 --- a/robosystems_client/api/query/execute_cypher_query.py +++ b/robosystems_client/api/query/execute_cypher_query.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -16,15 +16,15 @@ def _get_kwargs( graph_id: str, *, body: CypherQueryRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, - chunk_size: Union[None, Unset, int] = UNSET, - test_mode: Union[Unset, bool] = False, + mode: None | ResponseMode | Unset = UNSET, + chunk_size: int | None | Unset = UNSET, + test_mode: bool | Unset = False, ) -> dict[str, Any]: headers: dict[str, Any] = {} params: dict[str, Any] = {} - json_mode: Union[None, Unset, str] + json_mode: None | str | Unset if isinstance(mode, Unset): json_mode = UNSET elif isinstance(mode, ResponseMode): @@ -33,7 +33,7 @@ def _get_kwargs( json_mode = mode params["mode"] = json_mode - json_chunk_size: 
Union[None, Unset, int] + json_chunk_size: int | None | Unset if isinstance(chunk_size, Unset): json_chunk_size = UNSET else: @@ -59,8 +59,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ExecuteCypherQueryResponse200 | HTTPValidationError | None: if response.status_code == 200: content_type = response.headers.get("content-type", "") if ( @@ -112,8 +112,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ExecuteCypherQueryResponse200 | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -127,17 +127,20 @@ def sync_detailed( *, client: AuthenticatedClient, body: CypherQueryRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, - chunk_size: Union[None, Unset, int] = UNSET, - test_mode: Union[Unset, bool] = False, -) -> Response[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]: - r"""Execute Cypher Query (Read-Only) + mode: None | ResponseMode | Unset = UNSET, + chunk_size: int | None | Unset = UNSET, + test_mode: bool | Unset = False, +) -> Response[Any | ExecuteCypherQueryResponse200 | HTTPValidationError]: + r"""Execute Cypher Query - Execute a read-only Cypher query with intelligent response optimization. + Execute a Cypher query with intelligent response optimization. - **IMPORTANT: This endpoint is READ-ONLY.** Write operations (CREATE, MERGE, SET, DELETE) are not - allowed. - To load data into your graph, use the staging pipeline: + **IMPORTANT: Write operations depend on graph type:** + - **Main Graphs**: READ-ONLY. 
Write operations (CREATE, MERGE, SET, DELETE) are not allowed. + - **Subgraphs**: WRITE-ENABLED. Full Cypher write operations are supported for development and + report creation. + + To load data into main graphs, use the staging pipeline: 1. Create file upload: `POST /v1/graphs/{graph_id}/tables/{table_name}/files` 2. Ingest to graph: `POST /v1/graphs/{graph_id}/tables/ingest` @@ -205,9 +208,9 @@ def sync_detailed( Args: graph_id (str): - mode (Union[None, ResponseMode, Unset]): Response mode override - chunk_size (Union[None, Unset, int]): Rows per chunk for streaming - test_mode (Union[Unset, bool]): Enable test mode for better debugging Default: False. + mode (None | ResponseMode | Unset): Response mode override + chunk_size (int | None | Unset): Rows per chunk for streaming + test_mode (bool | Unset): Enable test mode for better debugging Default: False. body (CypherQueryRequest): Request model for Cypher query execution. Raises: @@ -215,7 +218,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]] + Response[Any | ExecuteCypherQueryResponse200 | HTTPValidationError] """ kwargs = _get_kwargs( @@ -238,17 +241,20 @@ def sync( *, client: AuthenticatedClient, body: CypherQueryRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, - chunk_size: Union[None, Unset, int] = UNSET, - test_mode: Union[Unset, bool] = False, -) -> Optional[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]: - r"""Execute Cypher Query (Read-Only) + mode: None | ResponseMode | Unset = UNSET, + chunk_size: int | None | Unset = UNSET, + test_mode: bool | Unset = False, +) -> Any | ExecuteCypherQueryResponse200 | HTTPValidationError | None: + r"""Execute Cypher Query + + Execute a Cypher query with intelligent response optimization. - Execute a read-only Cypher query with intelligent response optimization. 
+ **IMPORTANT: Write operations depend on graph type:** + - **Main Graphs**: READ-ONLY. Write operations (CREATE, MERGE, SET, DELETE) are not allowed. + - **Subgraphs**: WRITE-ENABLED. Full Cypher write operations are supported for development and + report creation. - **IMPORTANT: This endpoint is READ-ONLY.** Write operations (CREATE, MERGE, SET, DELETE) are not - allowed. - To load data into your graph, use the staging pipeline: + To load data into main graphs, use the staging pipeline: 1. Create file upload: `POST /v1/graphs/{graph_id}/tables/{table_name}/files` 2. Ingest to graph: `POST /v1/graphs/{graph_id}/tables/ingest` @@ -316,9 +322,9 @@ def sync( Args: graph_id (str): - mode (Union[None, ResponseMode, Unset]): Response mode override - chunk_size (Union[None, Unset, int]): Rows per chunk for streaming - test_mode (Union[Unset, bool]): Enable test mode for better debugging Default: False. + mode (None | ResponseMode | Unset): Response mode override + chunk_size (int | None | Unset): Rows per chunk for streaming + test_mode (bool | Unset): Enable test mode for better debugging Default: False. body (CypherQueryRequest): Request model for Cypher query execution. Raises: @@ -326,7 +332,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError] + Any | ExecuteCypherQueryResponse200 | HTTPValidationError """ return sync_detailed( @@ -344,17 +350,20 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: CypherQueryRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, - chunk_size: Union[None, Unset, int] = UNSET, - test_mode: Union[Unset, bool] = False, -) -> Response[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]: - r"""Execute Cypher Query (Read-Only) + mode: None | ResponseMode | Unset = UNSET, + chunk_size: int | None | Unset = UNSET, + test_mode: bool | Unset = False, +) -> Response[Any | ExecuteCypherQueryResponse200 | HTTPValidationError]: + r"""Execute Cypher Query - Execute a read-only Cypher query with intelligent response optimization. + Execute a Cypher query with intelligent response optimization. - **IMPORTANT: This endpoint is READ-ONLY.** Write operations (CREATE, MERGE, SET, DELETE) are not - allowed. - To load data into your graph, use the staging pipeline: + **IMPORTANT: Write operations depend on graph type:** + - **Main Graphs**: READ-ONLY. Write operations (CREATE, MERGE, SET, DELETE) are not allowed. + - **Subgraphs**: WRITE-ENABLED. Full Cypher write operations are supported for development and + report creation. + + To load data into main graphs, use the staging pipeline: 1. Create file upload: `POST /v1/graphs/{graph_id}/tables/{table_name}/files` 2. Ingest to graph: `POST /v1/graphs/{graph_id}/tables/ingest` @@ -422,9 +431,9 @@ async def asyncio_detailed( Args: graph_id (str): - mode (Union[None, ResponseMode, Unset]): Response mode override - chunk_size (Union[None, Unset, int]): Rows per chunk for streaming - test_mode (Union[Unset, bool]): Enable test mode for better debugging Default: False. 
+ mode (None | ResponseMode | Unset): Response mode override + chunk_size (int | None | Unset): Rows per chunk for streaming + test_mode (bool | Unset): Enable test mode for better debugging Default: False. body (CypherQueryRequest): Request model for Cypher query execution. Raises: @@ -432,7 +441,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]] + Response[Any | ExecuteCypherQueryResponse200 | HTTPValidationError] """ kwargs = _get_kwargs( @@ -453,17 +462,20 @@ async def asyncio( *, client: AuthenticatedClient, body: CypherQueryRequest, - mode: Union[None, ResponseMode, Unset] = UNSET, - chunk_size: Union[None, Unset, int] = UNSET, - test_mode: Union[Unset, bool] = False, -) -> Optional[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]: - r"""Execute Cypher Query (Read-Only) + mode: None | ResponseMode | Unset = UNSET, + chunk_size: int | None | Unset = UNSET, + test_mode: bool | Unset = False, +) -> Any | ExecuteCypherQueryResponse200 | HTTPValidationError | None: + r"""Execute Cypher Query + + Execute a Cypher query with intelligent response optimization. - Execute a read-only Cypher query with intelligent response optimization. + **IMPORTANT: Write operations depend on graph type:** + - **Main Graphs**: READ-ONLY. Write operations (CREATE, MERGE, SET, DELETE) are not allowed. + - **Subgraphs**: WRITE-ENABLED. Full Cypher write operations are supported for development and + report creation. - **IMPORTANT: This endpoint is READ-ONLY.** Write operations (CREATE, MERGE, SET, DELETE) are not - allowed. - To load data into your graph, use the staging pipeline: + To load data into main graphs, use the staging pipeline: 1. Create file upload: `POST /v1/graphs/{graph_id}/tables/{table_name}/files` 2. 
Ingest to graph: `POST /v1/graphs/{graph_id}/tables/ingest` @@ -531,9 +543,9 @@ async def asyncio( Args: graph_id (str): - mode (Union[None, ResponseMode, Unset]): Response mode override - chunk_size (Union[None, Unset, int]): Rows per chunk for streaming - test_mode (Union[Unset, bool]): Enable test mode for better debugging Default: False. + mode (None | ResponseMode | Unset): Response mode override + chunk_size (int | None | Unset): Rows per chunk for streaming + test_mode (bool | Unset): Enable test mode for better debugging Default: False. body (CypherQueryRequest): Request model for Cypher query execution. Raises: @@ -541,7 +553,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError] + Any | ExecuteCypherQueryResponse200 | HTTPValidationError """ return ( diff --git a/robosystems_client/api/schema/export_graph_schema.py b/robosystems_client/api/schema/export_graph_schema.py index a2332e6..5a2a0de 100644 --- a/robosystems_client/api/schema/export_graph_schema.py +++ b/robosystems_client/api/schema/export_graph_schema.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -13,8 +13,8 @@ def _get_kwargs( graph_id: str, *, - format_: Union[Unset, str] = "json", - include_data_stats: Union[Unset, bool] = False, + format_: str | Unset = "json", + include_data_stats: bool | Unset = False, ) -> dict[str, Any]: params: dict[str, Any] = {} @@ -34,8 +34,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError, SchemaExportResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | SchemaExportResponse | None: if response.status_code == 200: response_200 = SchemaExportResponse.from_dict(response.json()) @@ -65,8 +65,8 
@@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError, SchemaExportResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError | SchemaExportResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -79,9 +79,9 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, - format_: Union[Unset, str] = "json", - include_data_stats: Union[Unset, bool] = False, -) -> Response[Union[Any, HTTPValidationError, SchemaExportResponse]]: + format_: str | Unset = "json", + include_data_stats: bool | Unset = False, +) -> Response[Any | HTTPValidationError | SchemaExportResponse]: """Export Declared Graph Schema Export the declared schema definition of an existing graph. @@ -134,16 +134,16 @@ def sync_detailed( Args: graph_id (str): - format_ (Union[Unset, str]): Export format: json, yaml, or cypher Default: 'json'. - include_data_stats (Union[Unset, bool]): Include statistics about actual data in the graph - (node counts, relationship counts) Default: False. + format_ (str | Unset): Export format: json, yaml, or cypher Default: 'json'. + include_data_stats (bool | Unset): Include statistics about actual data in the graph (node + counts, relationship counts) Default: False. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, HTTPValidationError, SchemaExportResponse]] + Response[Any | HTTPValidationError | SchemaExportResponse] """ kwargs = _get_kwargs( @@ -163,9 +163,9 @@ def sync( graph_id: str, *, client: AuthenticatedClient, - format_: Union[Unset, str] = "json", - include_data_stats: Union[Unset, bool] = False, -) -> Optional[Union[Any, HTTPValidationError, SchemaExportResponse]]: + format_: str | Unset = "json", + include_data_stats: bool | Unset = False, +) -> Any | HTTPValidationError | SchemaExportResponse | None: """Export Declared Graph Schema Export the declared schema definition of an existing graph. @@ -218,16 +218,16 @@ def sync( Args: graph_id (str): - format_ (Union[Unset, str]): Export format: json, yaml, or cypher Default: 'json'. - include_data_stats (Union[Unset, bool]): Include statistics about actual data in the graph - (node counts, relationship counts) Default: False. + format_ (str | Unset): Export format: json, yaml, or cypher Default: 'json'. + include_data_stats (bool | Unset): Include statistics about actual data in the graph (node + counts, relationship counts) Default: False. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError, SchemaExportResponse] + Any | HTTPValidationError | SchemaExportResponse """ return sync_detailed( @@ -242,9 +242,9 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, - format_: Union[Unset, str] = "json", - include_data_stats: Union[Unset, bool] = False, -) -> Response[Union[Any, HTTPValidationError, SchemaExportResponse]]: + format_: str | Unset = "json", + include_data_stats: bool | Unset = False, +) -> Response[Any | HTTPValidationError | SchemaExportResponse]: """Export Declared Graph Schema Export the declared schema definition of an existing graph. 
@@ -297,16 +297,16 @@ async def asyncio_detailed( Args: graph_id (str): - format_ (Union[Unset, str]): Export format: json, yaml, or cypher Default: 'json'. - include_data_stats (Union[Unset, bool]): Include statistics about actual data in the graph - (node counts, relationship counts) Default: False. + format_ (str | Unset): Export format: json, yaml, or cypher Default: 'json'. + include_data_stats (bool | Unset): Include statistics about actual data in the graph (node + counts, relationship counts) Default: False. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError, SchemaExportResponse]] + Response[Any | HTTPValidationError | SchemaExportResponse] """ kwargs = _get_kwargs( @@ -324,9 +324,9 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, - format_: Union[Unset, str] = "json", - include_data_stats: Union[Unset, bool] = False, -) -> Optional[Union[Any, HTTPValidationError, SchemaExportResponse]]: + format_: str | Unset = "json", + include_data_stats: bool | Unset = False, +) -> Any | HTTPValidationError | SchemaExportResponse | None: """Export Declared Graph Schema Export the declared schema definition of an existing graph. @@ -379,16 +379,16 @@ async def asyncio( Args: graph_id (str): - format_ (Union[Unset, str]): Export format: json, yaml, or cypher Default: 'json'. - include_data_stats (Union[Unset, bool]): Include statistics about actual data in the graph - (node counts, relationship counts) Default: False. + format_ (str | Unset): Export format: json, yaml, or cypher Default: 'json'. + include_data_stats (bool | Unset): Include statistics about actual data in the graph (node + counts, relationship counts) Default: False. 
Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError, SchemaExportResponse] + Any | HTTPValidationError | SchemaExportResponse """ return ( diff --git a/robosystems_client/api/schema/get_graph_schema.py b/robosystems_client/api/schema/get_graph_schema.py index ab9f363..cc07b5b 100644 --- a/robosystems_client/api/schema/get_graph_schema.py +++ b/robosystems_client/api/schema/get_graph_schema.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError, SchemaInfoResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | SchemaInfoResponse | None: if response.status_code == 200: response_200 = SchemaInfoResponse.from_dict(response.json()) @@ -53,8 +53,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError, SchemaInfoResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError | SchemaInfoResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -67,7 +67,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, HTTPValidationError, SchemaInfoResponse]]: +) -> Response[Any | HTTPValidationError | SchemaInfoResponse]: """Get Runtime Graph Schema Get runtime schema information for the specified graph database. @@ -123,7 +123,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, HTTPValidationError, SchemaInfoResponse]] + Response[Any | HTTPValidationError | SchemaInfoResponse] """ kwargs = _get_kwargs( @@ -141,7 +141,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, HTTPValidationError, SchemaInfoResponse]]: +) -> Any | HTTPValidationError | SchemaInfoResponse | None: """Get Runtime Graph Schema Get runtime schema information for the specified graph database. @@ -197,7 +197,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError, SchemaInfoResponse] + Any | HTTPValidationError | SchemaInfoResponse """ return sync_detailed( @@ -210,7 +210,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, HTTPValidationError, SchemaInfoResponse]]: +) -> Response[Any | HTTPValidationError | SchemaInfoResponse]: """Get Runtime Graph Schema Get runtime schema information for the specified graph database. @@ -266,7 +266,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError, SchemaInfoResponse]] + Response[Any | HTTPValidationError | SchemaInfoResponse] """ kwargs = _get_kwargs( @@ -282,7 +282,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, HTTPValidationError, SchemaInfoResponse]]: +) -> Any | HTTPValidationError | SchemaInfoResponse | None: """Get Runtime Graph Schema Get runtime schema information for the specified graph database. @@ -338,7 +338,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, HTTPValidationError, SchemaInfoResponse] + Any | HTTPValidationError | SchemaInfoResponse """ return ( diff --git a/robosystems_client/api/schema/validate_schema.py b/robosystems_client/api/schema/validate_schema.py index 4a880d7..287570c 100644 --- a/robosystems_client/api/schema/validate_schema.py +++ b/robosystems_client/api/schema/validate_schema.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, SchemaValidationResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | SchemaValidationResponse | None: if response.status_code == 200: response_200 = SchemaValidationResponse.from_dict(response.json()) @@ -66,8 +66,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, SchemaValidationResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | SchemaValidationResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -81,7 +81,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: SchemaValidationRequest, -) -> Response[Union[ErrorResponse, SchemaValidationResponse]]: +) -> Response[ErrorResponse | SchemaValidationResponse]: """Validate Schema Validate a custom schema definition before deployment. @@ -123,7 +123,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, SchemaValidationResponse]] + Response[ErrorResponse | SchemaValidationResponse] """ kwargs = _get_kwargs( @@ -143,7 +143,7 @@ def sync( *, client: AuthenticatedClient, body: SchemaValidationRequest, -) -> Optional[Union[ErrorResponse, SchemaValidationResponse]]: +) -> ErrorResponse | SchemaValidationResponse | None: """Validate Schema Validate a custom schema definition before deployment. @@ -185,7 +185,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, SchemaValidationResponse] + ErrorResponse | SchemaValidationResponse """ return sync_detailed( @@ -200,7 +200,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: SchemaValidationRequest, -) -> Response[Union[ErrorResponse, SchemaValidationResponse]]: +) -> Response[ErrorResponse | SchemaValidationResponse]: """Validate Schema Validate a custom schema definition before deployment. @@ -242,7 +242,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, SchemaValidationResponse]] + Response[ErrorResponse | SchemaValidationResponse] """ kwargs = _get_kwargs( @@ -260,7 +260,7 @@ async def asyncio( *, client: AuthenticatedClient, body: SchemaValidationRequest, -) -> Optional[Union[ErrorResponse, SchemaValidationResponse]]: +) -> ErrorResponse | SchemaValidationResponse | None: """Validate Schema Validate a custom schema definition before deployment. @@ -302,7 +302,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, SchemaValidationResponse] + ErrorResponse | SchemaValidationResponse """ return ( diff --git a/robosystems_client/api/service_offerings/get_service_offerings.py b/robosystems_client/api/service_offerings/get_service_offerings.py index 1d294c7..cfa231c 100644 --- a/robosystems_client/api/service_offerings/get_service_offerings.py +++ b/robosystems_client/api/service_offerings/get_service_offerings.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -20,8 +20,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, ServiceOfferingsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | ServiceOfferingsResponse | None: if response.status_code == 200: response_200 = ServiceOfferingsResponse.from_dict(response.json()) @@ -39,8 +39,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, ServiceOfferingsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | ServiceOfferingsResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -51,8 +51,8 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[ErrorResponse, ServiceOfferingsResponse]]: + client: AuthenticatedClient | Client, +) -> Response[ErrorResponse | ServiceOfferingsResponse]: """Get Service Offerings Get comprehensive information about all subscription offerings. @@ -83,7 +83,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, ServiceOfferingsResponse]] + Response[ErrorResponse | ServiceOfferingsResponse] """ kwargs = _get_kwargs() @@ -97,8 +97,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[ErrorResponse, ServiceOfferingsResponse]]: + client: AuthenticatedClient | Client, +) -> ErrorResponse | ServiceOfferingsResponse | None: """Get Service Offerings Get comprehensive information about all subscription offerings. @@ -129,7 +129,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, ServiceOfferingsResponse] + ErrorResponse | ServiceOfferingsResponse """ return sync_detailed( @@ -139,8 +139,8 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[ErrorResponse, ServiceOfferingsResponse]]: + client: AuthenticatedClient | Client, +) -> Response[ErrorResponse | ServiceOfferingsResponse]: """Get Service Offerings Get comprehensive information about all subscription offerings. @@ -171,7 +171,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, ServiceOfferingsResponse]] + Response[ErrorResponse | ServiceOfferingsResponse] """ kwargs = _get_kwargs() @@ -183,8 +183,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[ErrorResponse, ServiceOfferingsResponse]]: + client: AuthenticatedClient | Client, +) -> ErrorResponse | ServiceOfferingsResponse | None: """Get Service Offerings Get comprehensive information about all subscription offerings. @@ -215,7 +215,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, ServiceOfferingsResponse] + ErrorResponse | ServiceOfferingsResponse """ return ( diff --git a/robosystems_client/api/status/get_service_status.py b/robosystems_client/api/status/get_service_status.py index de434ff..605ffad 100644 --- a/robosystems_client/api/status/get_service_status.py +++ b/robosystems_client/api/status/get_service_status.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -19,8 +19,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[HealthStatus]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HealthStatus | None: if response.status_code == 200: response_200 = HealthStatus.from_dict(response.json()) @@ -33,7 +33,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[HealthStatus]: return Response( status_code=HTTPStatus(response.status_code), @@ -45,7 +45,7 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[HealthStatus]: """Health Check @@ -70,8 +70,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[HealthStatus]: + client: AuthenticatedClient | Client, +) -> HealthStatus | None: """Health Check Service health check endpoint for monitoring and load balancers @@ -91,7 +91,7 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[HealthStatus]: """Health Check @@ -114,8 +114,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[HealthStatus]: + client: AuthenticatedClient | Client, 
+) -> HealthStatus | None: """Health Check Service health check endpoint for monitoring and load balancers diff --git a/robosystems_client/api/subgraphs/create_subgraph.py b/robosystems_client/api/subgraphs/create_subgraph.py index 33bd206..424647f 100644 --- a/robosystems_client/api/subgraphs/create_subgraph.py +++ b/robosystems_client/api/subgraphs/create_subgraph.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,12 +32,12 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, SubgraphResponse]]: - if response.status_code == 201: - response_201 = SubgraphResponse.from_dict(response.json()) + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | SubgraphResponse | None: + if response.status_code == 200: + response_200 = SubgraphResponse.from_dict(response.json()) - return response_201 + return response_200 if response.status_code == 422: response_422 = HTTPValidationError.from_dict(response.json()) @@ -51,8 +51,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, SubgraphResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | SubgraphResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -66,10 +66,10 @@ def sync_detailed( *, client: AuthenticatedClient, body: CreateSubgraphRequest, -) -> Response[Union[HTTPValidationError, SubgraphResponse]]: +) -> Response[HTTPValidationError | SubgraphResponse]: """Create Subgraph - Create a new subgraph within a parent graph. + Create a new subgraph within a parent graph, with optional data forking. 
**Requirements:** - Valid authentication @@ -79,9 +79,18 @@ def sync_detailed( - Must be within subgraph quota limits - Subgraph name must be unique within the parent graph + **Fork Mode:** + When `fork_parent=true`, the operation: + - Returns immediately with an operation_id for SSE monitoring + - Copies data from parent graph to the new subgraph + - Supports selective forking via metadata.fork_options + - Tracks progress in real-time via SSE + **Returns:** - - Created subgraph details including its unique ID - - Subgraph ID format: `{parent_id}_{subgraph_name}` (e.g., kg1234567890abcdef_dev) + - Without fork: Immediate SubgraphResponse with created subgraph details + - With fork: Operation response with SSE monitoring endpoint + + **Subgraph ID format:** `{parent_id}_{subgraph_name}` (e.g., kg1234567890abcdef_dev) **Usage:** - Subgraphs share parent's credit pool @@ -97,7 +106,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, SubgraphResponse]] + Response[HTTPValidationError | SubgraphResponse] """ kwargs = _get_kwargs( @@ -117,10 +126,10 @@ def sync( *, client: AuthenticatedClient, body: CreateSubgraphRequest, -) -> Optional[Union[HTTPValidationError, SubgraphResponse]]: +) -> HTTPValidationError | SubgraphResponse | None: """Create Subgraph - Create a new subgraph within a parent graph. + Create a new subgraph within a parent graph, with optional data forking. 
**Requirements:** - Valid authentication @@ -130,9 +139,18 @@ def sync( - Must be within subgraph quota limits - Subgraph name must be unique within the parent graph + **Fork Mode:** + When `fork_parent=true`, the operation: + - Returns immediately with an operation_id for SSE monitoring + - Copies data from parent graph to the new subgraph + - Supports selective forking via metadata.fork_options + - Tracks progress in real-time via SSE + **Returns:** - - Created subgraph details including its unique ID - - Subgraph ID format: `{parent_id}_{subgraph_name}` (e.g., kg1234567890abcdef_dev) + - Without fork: Immediate SubgraphResponse with created subgraph details + - With fork: Operation response with SSE monitoring endpoint + + **Subgraph ID format:** `{parent_id}_{subgraph_name}` (e.g., kg1234567890abcdef_dev) **Usage:** - Subgraphs share parent's credit pool @@ -148,7 +166,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, SubgraphResponse] + HTTPValidationError | SubgraphResponse """ return sync_detailed( @@ -163,10 +181,10 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: CreateSubgraphRequest, -) -> Response[Union[HTTPValidationError, SubgraphResponse]]: +) -> Response[HTTPValidationError | SubgraphResponse]: """Create Subgraph - Create a new subgraph within a parent graph. + Create a new subgraph within a parent graph, with optional data forking. 
**Requirements:** - Valid authentication @@ -176,9 +194,18 @@ async def asyncio_detailed( - Must be within subgraph quota limits - Subgraph name must be unique within the parent graph + **Fork Mode:** + When `fork_parent=true`, the operation: + - Returns immediately with an operation_id for SSE monitoring + - Copies data from parent graph to the new subgraph + - Supports selective forking via metadata.fork_options + - Tracks progress in real-time via SSE + **Returns:** - - Created subgraph details including its unique ID - - Subgraph ID format: `{parent_id}_{subgraph_name}` (e.g., kg1234567890abcdef_dev) + - Without fork: Immediate SubgraphResponse with created subgraph details + - With fork: Operation response with SSE monitoring endpoint + + **Subgraph ID format:** `{parent_id}_{subgraph_name}` (e.g., kg1234567890abcdef_dev) **Usage:** - Subgraphs share parent's credit pool @@ -194,7 +221,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, SubgraphResponse]] + Response[HTTPValidationError | SubgraphResponse] """ kwargs = _get_kwargs( @@ -212,10 +239,10 @@ async def asyncio( *, client: AuthenticatedClient, body: CreateSubgraphRequest, -) -> Optional[Union[HTTPValidationError, SubgraphResponse]]: +) -> HTTPValidationError | SubgraphResponse | None: """Create Subgraph - Create a new subgraph within a parent graph. + Create a new subgraph within a parent graph, with optional data forking. 
**Requirements:** - Valid authentication @@ -225,9 +252,18 @@ async def asyncio( - Must be within subgraph quota limits - Subgraph name must be unique within the parent graph + **Fork Mode:** + When `fork_parent=true`, the operation: + - Returns immediately with an operation_id for SSE monitoring + - Copies data from parent graph to the new subgraph + - Supports selective forking via metadata.fork_options + - Tracks progress in real-time via SSE + **Returns:** - - Created subgraph details including its unique ID - - Subgraph ID format: `{parent_id}_{subgraph_name}` (e.g., kg1234567890abcdef_dev) + - Without fork: Immediate SubgraphResponse with created subgraph details + - With fork: Operation response with SSE monitoring endpoint + + **Subgraph ID format:** `{parent_id}_{subgraph_name}` (e.g., kg1234567890abcdef_dev) **Usage:** - Subgraphs share parent's credit pool @@ -243,7 +279,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, SubgraphResponse] + HTTPValidationError | SubgraphResponse """ return ( diff --git a/robosystems_client/api/subgraphs/delete_subgraph.py b/robosystems_client/api/subgraphs/delete_subgraph.py index 780b7d0..157c2bd 100644 --- a/robosystems_client/api/subgraphs/delete_subgraph.py +++ b/robosystems_client/api/subgraphs/delete_subgraph.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -33,8 +33,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, DeleteSubgraphResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | DeleteSubgraphResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = DeleteSubgraphResponse.from_dict(response.json()) @@ -76,8 +76,8 @@ def _parse_response( def _build_response( - *, 
client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, DeleteSubgraphResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | DeleteSubgraphResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -92,7 +92,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: DeleteSubgraphRequest, -) -> Response[Union[Any, DeleteSubgraphResponse, HTTPValidationError]]: +) -> Response[Any | DeleteSubgraphResponse | HTTPValidationError]: """Delete Subgraph Delete a subgraph database. @@ -130,7 +130,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, DeleteSubgraphResponse, HTTPValidationError]] + Response[Any | DeleteSubgraphResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -152,7 +152,7 @@ def sync( *, client: AuthenticatedClient, body: DeleteSubgraphRequest, -) -> Optional[Union[Any, DeleteSubgraphResponse, HTTPValidationError]]: +) -> Any | DeleteSubgraphResponse | HTTPValidationError | None: """Delete Subgraph Delete a subgraph database. @@ -190,7 +190,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, DeleteSubgraphResponse, HTTPValidationError] + Any | DeleteSubgraphResponse | HTTPValidationError """ return sync_detailed( @@ -207,7 +207,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: DeleteSubgraphRequest, -) -> Response[Union[Any, DeleteSubgraphResponse, HTTPValidationError]]: +) -> Response[Any | DeleteSubgraphResponse | HTTPValidationError]: """Delete Subgraph Delete a subgraph database. @@ -245,7 +245,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, DeleteSubgraphResponse, HTTPValidationError]] + Response[Any | DeleteSubgraphResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -265,7 +265,7 @@ async def asyncio( *, client: AuthenticatedClient, body: DeleteSubgraphRequest, -) -> Optional[Union[Any, DeleteSubgraphResponse, HTTPValidationError]]: +) -> Any | DeleteSubgraphResponse | HTTPValidationError | None: """Delete Subgraph Delete a subgraph database. @@ -303,7 +303,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, DeleteSubgraphResponse, HTTPValidationError] + Any | DeleteSubgraphResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/subgraphs/get_subgraph_info.py b/robosystems_client/api/subgraphs/get_subgraph_info.py index d029e7c..092b167 100644 --- a/robosystems_client/api/subgraphs/get_subgraph_info.py +++ b/robosystems_client/api/subgraphs/get_subgraph_info.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError, SubgraphResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | SubgraphResponse | None: if response.status_code == 200: response_200 = SubgraphResponse.from_dict(response.json()) @@ -62,8 +62,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError, SubgraphResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError | SubgraphResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -77,7 +77,7 @@ def sync_detailed( subgraph_name: str, *, 
client: AuthenticatedClient, -) -> Response[Union[Any, HTTPValidationError, SubgraphResponse]]: +) -> Response[Any | HTTPValidationError | SubgraphResponse]: """Get Subgraph Details Get detailed information about a specific subgraph. @@ -114,7 +114,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError, SubgraphResponse]] + Response[Any | HTTPValidationError | SubgraphResponse] """ kwargs = _get_kwargs( @@ -134,7 +134,7 @@ def sync( subgraph_name: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, HTTPValidationError, SubgraphResponse]]: +) -> Any | HTTPValidationError | SubgraphResponse | None: """Get Subgraph Details Get detailed information about a specific subgraph. @@ -171,7 +171,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError, SubgraphResponse] + Any | HTTPValidationError | SubgraphResponse """ return sync_detailed( @@ -186,7 +186,7 @@ async def asyncio_detailed( subgraph_name: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, HTTPValidationError, SubgraphResponse]]: +) -> Response[Any | HTTPValidationError | SubgraphResponse]: """Get Subgraph Details Get detailed information about a specific subgraph. @@ -223,7 +223,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError, SubgraphResponse]] + Response[Any | HTTPValidationError | SubgraphResponse] """ kwargs = _get_kwargs( @@ -241,7 +241,7 @@ async def asyncio( subgraph_name: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, HTTPValidationError, SubgraphResponse]]: +) -> Any | HTTPValidationError | SubgraphResponse | None: """Get Subgraph Details Get detailed information about a specific subgraph. 
@@ -278,7 +278,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError, SubgraphResponse] + Any | HTTPValidationError | SubgraphResponse """ return ( diff --git a/robosystems_client/api/subgraphs/get_subgraph_quota.py b/robosystems_client/api/subgraphs/get_subgraph_quota.py index 0d8cd2b..10ec833 100644 --- a/robosystems_client/api/subgraphs/get_subgraph_quota.py +++ b/robosystems_client/api/subgraphs/get_subgraph_quota.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError, SubgraphQuotaResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | SubgraphQuotaResponse | None: if response.status_code == 200: response_200 = SubgraphQuotaResponse.from_dict(response.json()) @@ -57,8 +57,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError, SubgraphQuotaResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError | SubgraphQuotaResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -71,7 +71,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, HTTPValidationError, SubgraphQuotaResponse]]: +) -> Response[Any | HTTPValidationError | SubgraphQuotaResponse]: """Get Subgraph Quota Get subgraph quota and usage information for a parent graph. @@ -100,7 +100,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, HTTPValidationError, SubgraphQuotaResponse]] + Response[Any | HTTPValidationError | SubgraphQuotaResponse] """ kwargs = _get_kwargs( @@ -118,7 +118,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, HTTPValidationError, SubgraphQuotaResponse]]: +) -> Any | HTTPValidationError | SubgraphQuotaResponse | None: """Get Subgraph Quota Get subgraph quota and usage information for a parent graph. @@ -147,7 +147,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError, SubgraphQuotaResponse] + Any | HTTPValidationError | SubgraphQuotaResponse """ return sync_detailed( @@ -160,7 +160,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, HTTPValidationError, SubgraphQuotaResponse]]: +) -> Response[Any | HTTPValidationError | SubgraphQuotaResponse]: """Get Subgraph Quota Get subgraph quota and usage information for a parent graph. @@ -189,7 +189,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError, SubgraphQuotaResponse]] + Response[Any | HTTPValidationError | SubgraphQuotaResponse] """ kwargs = _get_kwargs( @@ -205,7 +205,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, HTTPValidationError, SubgraphQuotaResponse]]: +) -> Any | HTTPValidationError | SubgraphQuotaResponse | None: """Get Subgraph Quota Get subgraph quota and usage information for a parent graph. @@ -234,7 +234,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, HTTPValidationError, SubgraphQuotaResponse] + Any | HTTPValidationError | SubgraphQuotaResponse """ return ( diff --git a/robosystems_client/api/subgraphs/list_subgraphs.py b/robosystems_client/api/subgraphs/list_subgraphs.py index 7e06b09..3a1192b 100644 --- a/robosystems_client/api/subgraphs/list_subgraphs.py +++ b/robosystems_client/api/subgraphs/list_subgraphs.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, ListSubgraphsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | ListSubgraphsResponse | None: if response.status_code == 200: response_200 = ListSubgraphsResponse.from_dict(response.json()) @@ -41,8 +41,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, ListSubgraphsResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | ListSubgraphsResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -55,7 +55,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, ListSubgraphsResponse]]: +) -> Response[HTTPValidationError | ListSubgraphsResponse]: """List Subgraphs List all subgraphs for a parent graph. @@ -77,7 +77,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, ListSubgraphsResponse]] + Response[HTTPValidationError | ListSubgraphsResponse] """ kwargs = _get_kwargs( @@ -95,7 +95,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, ListSubgraphsResponse]]: +) -> HTTPValidationError | ListSubgraphsResponse | None: """List Subgraphs List all subgraphs for a parent graph. @@ -117,7 +117,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, ListSubgraphsResponse] + HTTPValidationError | ListSubgraphsResponse """ return sync_detailed( @@ -130,7 +130,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[HTTPValidationError, ListSubgraphsResponse]]: +) -> Response[HTTPValidationError | ListSubgraphsResponse]: """List Subgraphs List all subgraphs for a parent graph. @@ -152,7 +152,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, ListSubgraphsResponse]] + Response[HTTPValidationError | ListSubgraphsResponse] """ kwargs = _get_kwargs( @@ -168,7 +168,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[HTTPValidationError, ListSubgraphsResponse]]: +) -> HTTPValidationError | ListSubgraphsResponse | None: """List Subgraphs List all subgraphs for a parent graph. @@ -190,7 +190,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, ListSubgraphsResponse] + HTTPValidationError | ListSubgraphsResponse """ return ( diff --git a/robosystems_client/api/subscriptions/create_repository_subscription.py b/robosystems_client/api/subscriptions/create_repository_subscription.py index a6850d7..5745431 100644 --- a/robosystems_client/api/subscriptions/create_repository_subscription.py +++ b/robosystems_client/api/subscriptions/create_repository_subscription.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -34,8 +34,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | GraphSubscriptionResponse | HTTPValidationError | None: if response.status_code == 201: response_201 = GraphSubscriptionResponse.from_dict(response.json()) @@ -61,8 +61,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | GraphSubscriptionResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -76,7 +76,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: CreateRepositorySubscriptionRequest, -) -> Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Response[Any | GraphSubscriptionResponse | HTTPValidationError]: """Create Repository Subscription Create a new subscription to a shared repository. @@ -95,7 +95,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]] + Response[Any | GraphSubscriptionResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -115,7 +115,7 @@ def sync( *, client: AuthenticatedClient, body: CreateRepositorySubscriptionRequest, -) -> Optional[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Any | GraphSubscriptionResponse | HTTPValidationError | None: """Create Repository Subscription Create a new subscription to a shared repository. @@ -134,7 +134,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, GraphSubscriptionResponse, HTTPValidationError] + Any | GraphSubscriptionResponse | HTTPValidationError """ return sync_detailed( @@ -149,7 +149,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: CreateRepositorySubscriptionRequest, -) -> Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Response[Any | GraphSubscriptionResponse | HTTPValidationError]: """Create Repository Subscription Create a new subscription to a shared repository. @@ -168,7 +168,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]] + Response[Any | GraphSubscriptionResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -186,7 +186,7 @@ async def asyncio( *, client: AuthenticatedClient, body: CreateRepositorySubscriptionRequest, -) -> Optional[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Any | GraphSubscriptionResponse | HTTPValidationError | None: """Create Repository Subscription Create a new subscription to a shared repository. @@ -205,7 +205,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, GraphSubscriptionResponse, HTTPValidationError] + Any | GraphSubscriptionResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/subscriptions/get_graph_subscription.py b/robosystems_client/api/subscriptions/get_graph_subscription.py index 147cbe1..e14e28f 100644 --- a/robosystems_client/api/subscriptions/get_graph_subscription.py +++ b/robosystems_client/api/subscriptions/get_graph_subscription.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -22,8 +22,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | GraphSubscriptionResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = GraphSubscriptionResponse.from_dict(response.json()) @@ -45,8 +45,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | GraphSubscriptionResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -59,7 +59,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Response[Any | GraphSubscriptionResponse | HTTPValidationError]: """Get Subscription Get subscription details for a graph or shared repository. @@ -79,7 +79,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]] + Response[Any | GraphSubscriptionResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -97,7 +97,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Any | GraphSubscriptionResponse | HTTPValidationError | None: """Get Subscription Get subscription details for a graph or shared repository. @@ -117,7 +117,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, GraphSubscriptionResponse, HTTPValidationError] + Any | GraphSubscriptionResponse | HTTPValidationError """ return sync_detailed( @@ -130,7 +130,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Response[Any | GraphSubscriptionResponse | HTTPValidationError]: """Get Subscription Get subscription details for a graph or shared repository. @@ -150,7 +150,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]] + Response[Any | GraphSubscriptionResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -166,7 +166,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Any | GraphSubscriptionResponse | HTTPValidationError | None: """Get Subscription Get subscription details for a graph or shared repository. @@ -186,7 +186,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, GraphSubscriptionResponse, HTTPValidationError] + Any | GraphSubscriptionResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/subscriptions/upgrade_subscription.py b/robosystems_client/api/subscriptions/upgrade_subscription.py index 4de527f..a491282 100644 --- a/robosystems_client/api/subscriptions/upgrade_subscription.py +++ b/robosystems_client/api/subscriptions/upgrade_subscription.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | GraphSubscriptionResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = GraphSubscriptionResponse.from_dict(response.json()) @@ -55,8 +55,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | GraphSubscriptionResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -70,7 +70,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: UpgradeSubscriptionRequest, -) -> Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Response[Any | GraphSubscriptionResponse | HTTPValidationError]: """Upgrade Subscription Upgrade a subscription to a different plan. @@ -87,7 +87,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]] + Response[Any | GraphSubscriptionResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -107,7 +107,7 @@ def sync( *, client: AuthenticatedClient, body: UpgradeSubscriptionRequest, -) -> Optional[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Any | GraphSubscriptionResponse | HTTPValidationError | None: """Upgrade Subscription Upgrade a subscription to a different plan. @@ -124,7 +124,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, GraphSubscriptionResponse, HTTPValidationError] + Any | GraphSubscriptionResponse | HTTPValidationError """ return sync_detailed( @@ -139,7 +139,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: UpgradeSubscriptionRequest, -) -> Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Response[Any | GraphSubscriptionResponse | HTTPValidationError]: """Upgrade Subscription Upgrade a subscription to a different plan. @@ -156,7 +156,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, GraphSubscriptionResponse, HTTPValidationError]] + Response[Any | GraphSubscriptionResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -174,7 +174,7 @@ async def asyncio( *, client: AuthenticatedClient, body: UpgradeSubscriptionRequest, -) -> Optional[Union[Any, GraphSubscriptionResponse, HTTPValidationError]]: +) -> Any | GraphSubscriptionResponse | HTTPValidationError | None: """Upgrade Subscription Upgrade a subscription to a different plan. @@ -191,7 +191,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, GraphSubscriptionResponse, HTTPValidationError] + Any | GraphSubscriptionResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/tables/delete_file.py b/robosystems_client/api/tables/delete_file.py deleted file mode 100644 index e50096f..0000000 --- a/robosystems_client/api/tables/delete_file.py +++ /dev/null @@ -1,317 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... import errors -from ...client import AuthenticatedClient, Client -from ...models.delete_file_response import DeleteFileResponse -from ...models.error_response import ErrorResponse -from ...models.http_validation_error import HTTPValidationError -from ...types import Response - - -def _get_kwargs( - graph_id: str, - file_id: str, -) -> dict[str, Any]: - _kwargs: dict[str, Any] = { - "method": "delete", - "url": f"/v1/graphs/{graph_id}/tables/files/{file_id}", - } - - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, DeleteFileResponse, ErrorResponse, HTTPValidationError]]: - if response.status_code == 200: - response_200 = DeleteFileResponse.from_dict(response.json()) - - return response_200 - - if response.status_code == 401: - response_401 = cast(Any, None) - return response_401 - - if response.status_code == 403: - response_403 = ErrorResponse.from_dict(response.json()) - - return response_403 - - if response.status_code == 404: - response_404 = ErrorResponse.from_dict(response.json()) - - return response_404 - - if response.status_code == 422: - response_422 = HTTPValidationError.from_dict(response.json()) - - return response_422 - - if response.status_code == 500: - response_500 = cast(Any, None) - return response_500 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: 
Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, DeleteFileResponse, ErrorResponse, HTTPValidationError]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, -) -> Response[Union[Any, DeleteFileResponse, ErrorResponse, HTTPValidationError]]: - """Delete File from Staging - - Delete a file from S3 storage and database tracking. - - Remove unwanted, duplicate, or incorrect files from staging tables before ingestion. - The file is deleted from both S3 and database tracking, and table statistics - are automatically recalculated. - - **Use Cases:** - - Remove duplicate uploads - - Delete files with incorrect data - - Clean up failed uploads - - Fix data quality issues before ingestion - - Manage storage usage - - **What Happens:** - 1. File deleted from S3 storage - 2. Database tracking record removed - 3. Table statistics recalculated (file count, size, row count) - 4. DuckDB automatically excludes file from future queries - - **Security:** - - Write access required (verified via auth) - - Shared repositories block file deletions - - Full audit trail of deletion operations - - Cannot delete after ingestion to graph - - **Important Notes:** - - Delete files before ingestion for best results - - Table statistics update automatically - - No need to refresh DuckDB - exclusion is automatic - - Consider re-uploading corrected version after deletion - - File deletion is included - no credit consumption - - Args: - graph_id (str): - file_id (str): File ID - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Response[Union[Any, DeleteFileResponse, ErrorResponse, HTTPValidationError]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - file_id=file_id, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, -) -> Optional[Union[Any, DeleteFileResponse, ErrorResponse, HTTPValidationError]]: - """Delete File from Staging - - Delete a file from S3 storage and database tracking. - - Remove unwanted, duplicate, or incorrect files from staging tables before ingestion. - The file is deleted from both S3 and database tracking, and table statistics - are automatically recalculated. - - **Use Cases:** - - Remove duplicate uploads - - Delete files with incorrect data - - Clean up failed uploads - - Fix data quality issues before ingestion - - Manage storage usage - - **What Happens:** - 1. File deleted from S3 storage - 2. Database tracking record removed - 3. Table statistics recalculated (file count, size, row count) - 4. DuckDB automatically excludes file from future queries - - **Security:** - - Write access required (verified via auth) - - Shared repositories block file deletions - - Full audit trail of deletion operations - - Cannot delete after ingestion to graph - - **Important Notes:** - - Delete files before ingestion for best results - - Table statistics update automatically - - No need to refresh DuckDB - exclusion is automatic - - Consider re-uploading corrected version after deletion - - File deletion is included - no credit consumption - - Args: - graph_id (str): - file_id (str): File ID - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Union[Any, DeleteFileResponse, ErrorResponse, HTTPValidationError] - """ - - return sync_detailed( - graph_id=graph_id, - file_id=file_id, - client=client, - ).parsed - - -async def asyncio_detailed( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, -) -> Response[Union[Any, DeleteFileResponse, ErrorResponse, HTTPValidationError]]: - """Delete File from Staging - - Delete a file from S3 storage and database tracking. - - Remove unwanted, duplicate, or incorrect files from staging tables before ingestion. - The file is deleted from both S3 and database tracking, and table statistics - are automatically recalculated. - - **Use Cases:** - - Remove duplicate uploads - - Delete files with incorrect data - - Clean up failed uploads - - Fix data quality issues before ingestion - - Manage storage usage - - **What Happens:** - 1. File deleted from S3 storage - 2. Database tracking record removed - 3. Table statistics recalculated (file count, size, row count) - 4. DuckDB automatically excludes file from future queries - - **Security:** - - Write access required (verified via auth) - - Shared repositories block file deletions - - Full audit trail of deletion operations - - Cannot delete after ingestion to graph - - **Important Notes:** - - Delete files before ingestion for best results - - Table statistics update automatically - - No need to refresh DuckDB - exclusion is automatic - - Consider re-uploading corrected version after deletion - - File deletion is included - no credit consumption - - Args: - graph_id (str): - file_id (str): File ID - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Response[Union[Any, DeleteFileResponse, ErrorResponse, HTTPValidationError]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - file_id=file_id, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, -) -> Optional[Union[Any, DeleteFileResponse, ErrorResponse, HTTPValidationError]]: - """Delete File from Staging - - Delete a file from S3 storage and database tracking. - - Remove unwanted, duplicate, or incorrect files from staging tables before ingestion. - The file is deleted from both S3 and database tracking, and table statistics - are automatically recalculated. - - **Use Cases:** - - Remove duplicate uploads - - Delete files with incorrect data - - Clean up failed uploads - - Fix data quality issues before ingestion - - Manage storage usage - - **What Happens:** - 1. File deleted from S3 storage - 2. Database tracking record removed - 3. Table statistics recalculated (file count, size, row count) - 4. DuckDB automatically excludes file from future queries - - **Security:** - - Write access required (verified via auth) - - Shared repositories block file deletions - - Full audit trail of deletion operations - - Cannot delete after ingestion to graph - - **Important Notes:** - - Delete files before ingestion for best results - - Table statistics update automatically - - No need to refresh DuckDB - exclusion is automatic - - Consider re-uploading corrected version after deletion - - File deletion is included - no credit consumption - - Args: - graph_id (str): - file_id (str): File ID - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Union[Any, DeleteFileResponse, ErrorResponse, HTTPValidationError] - """ - - return ( - await asyncio_detailed( - graph_id=graph_id, - file_id=file_id, - client=client, - ) - ).parsed diff --git a/robosystems_client/api/tables/get_file_info.py b/robosystems_client/api/tables/get_file_info.py deleted file mode 100644 index 1601b64..0000000 --- a/robosystems_client/api/tables/get_file_info.py +++ /dev/null @@ -1,249 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... import errors -from ...client import AuthenticatedClient, Client -from ...models.error_response import ErrorResponse -from ...models.get_file_info_response import GetFileInfoResponse -from ...models.http_validation_error import HTTPValidationError -from ...types import Response - - -def _get_kwargs( - graph_id: str, - file_id: str, -) -> dict[str, Any]: - _kwargs: dict[str, Any] = { - "method": "get", - "url": f"/v1/graphs/{graph_id}/tables/files/{file_id}", - } - - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, GetFileInfoResponse, HTTPValidationError]]: - if response.status_code == 200: - response_200 = GetFileInfoResponse.from_dict(response.json()) - - return response_200 - - if response.status_code == 401: - response_401 = cast(Any, None) - return response_401 - - if response.status_code == 403: - response_403 = ErrorResponse.from_dict(response.json()) - - return response_403 - - if response.status_code == 404: - response_404 = ErrorResponse.from_dict(response.json()) - - return response_404 - - if response.status_code == 422: - response_422 = HTTPValidationError.from_dict(response.json()) - - return response_422 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], 
response: httpx.Response -) -> Response[Union[Any, ErrorResponse, GetFileInfoResponse, HTTPValidationError]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, -) -> Response[Union[Any, ErrorResponse, GetFileInfoResponse, HTTPValidationError]]: - """Get File Information - - Get detailed information about a specific file. - - Retrieve comprehensive metadata for a single file, including upload status, - size, row count, and timestamps. Useful for validating individual files - before ingestion. - - **Use Cases:** - - Validate file upload completion - - Check file metadata before ingestion - - Debug upload issues - - Verify file format and size - - Track file lifecycle - - **Note:** - File info retrieval is included - no credit consumption - - Args: - graph_id (str): - file_id (str): File ID - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, ErrorResponse, GetFileInfoResponse, HTTPValidationError]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - file_id=file_id, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, -) -> Optional[Union[Any, ErrorResponse, GetFileInfoResponse, HTTPValidationError]]: - """Get File Information - - Get detailed information about a specific file. - - Retrieve comprehensive metadata for a single file, including upload status, - size, row count, and timestamps. Useful for validating individual files - before ingestion. 
- - **Use Cases:** - - Validate file upload completion - - Check file metadata before ingestion - - Debug upload issues - - Verify file format and size - - Track file lifecycle - - **Note:** - File info retrieval is included - no credit consumption - - Args: - graph_id (str): - file_id (str): File ID - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, ErrorResponse, GetFileInfoResponse, HTTPValidationError] - """ - - return sync_detailed( - graph_id=graph_id, - file_id=file_id, - client=client, - ).parsed - - -async def asyncio_detailed( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, -) -> Response[Union[Any, ErrorResponse, GetFileInfoResponse, HTTPValidationError]]: - """Get File Information - - Get detailed information about a specific file. - - Retrieve comprehensive metadata for a single file, including upload status, - size, row count, and timestamps. Useful for validating individual files - before ingestion. - - **Use Cases:** - - Validate file upload completion - - Check file metadata before ingestion - - Debug upload issues - - Verify file format and size - - Track file lifecycle - - **Note:** - File info retrieval is included - no credit consumption - - Args: - graph_id (str): - file_id (str): File ID - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Response[Union[Any, ErrorResponse, GetFileInfoResponse, HTTPValidationError]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - file_id=file_id, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, -) -> Optional[Union[Any, ErrorResponse, GetFileInfoResponse, HTTPValidationError]]: - """Get File Information - - Get detailed information about a specific file. - - Retrieve comprehensive metadata for a single file, including upload status, - size, row count, and timestamps. Useful for validating individual files - before ingestion. - - **Use Cases:** - - Validate file upload completion - - Check file metadata before ingestion - - Debug upload issues - - Verify file format and size - - Track file lifecycle - - **Note:** - File info retrieval is included - no credit consumption - - Args: - graph_id (str): - file_id (str): File ID - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, ErrorResponse, GetFileInfoResponse, HTTPValidationError] - """ - - return ( - await asyncio_detailed( - graph_id=graph_id, - file_id=file_id, - client=client, - ) - ).parsed diff --git a/robosystems_client/api/tables/get_upload_url.py b/robosystems_client/api/tables/get_upload_url.py deleted file mode 100644 index 6d7f7fd..0000000 --- a/robosystems_client/api/tables/get_upload_url.py +++ /dev/null @@ -1,380 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... 
import errors -from ...client import AuthenticatedClient, Client -from ...models.error_response import ErrorResponse -from ...models.file_upload_request import FileUploadRequest -from ...models.file_upload_response import FileUploadResponse -from ...models.http_validation_error import HTTPValidationError -from ...types import Response - - -def _get_kwargs( - graph_id: str, - table_name: str, - *, - body: FileUploadRequest, -) -> dict[str, Any]: - headers: dict[str, Any] = {} - - _kwargs: dict[str, Any] = { - "method": "post", - "url": f"/v1/graphs/{graph_id}/tables/{table_name}/files", - } - - _kwargs["json"] = body.to_dict() - - headers["Content-Type"] = "application/json" - - _kwargs["headers"] = headers - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, FileUploadResponse, HTTPValidationError]]: - if response.status_code == 200: - response_200 = FileUploadResponse.from_dict(response.json()) - - return response_200 - - if response.status_code == 400: - response_400 = ErrorResponse.from_dict(response.json()) - - return response_400 - - if response.status_code == 401: - response_401 = cast(Any, None) - return response_401 - - if response.status_code == 403: - response_403 = ErrorResponse.from_dict(response.json()) - - return response_403 - - if response.status_code == 404: - response_404 = ErrorResponse.from_dict(response.json()) - - return response_404 - - if response.status_code == 422: - response_422 = HTTPValidationError.from_dict(response.json()) - - return response_422 - - if response.status_code == 500: - response_500 = cast(Any, None) - return response_500 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ErrorResponse, FileUploadResponse, 
HTTPValidationError]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - graph_id: str, - table_name: str, - *, - client: AuthenticatedClient, - body: FileUploadRequest, -) -> Response[Union[Any, ErrorResponse, FileUploadResponse, HTTPValidationError]]: - r"""Get File Upload URL - - Generate a presigned S3 URL for secure file upload. - - Initiates file upload to a staging table by generating a secure, time-limited - presigned S3 URL. Files are uploaded directly to S3, bypassing the API for - optimal performance. - - **Upload Workflow:** - 1. Call this endpoint to get presigned URL - 2. PUT file directly to S3 URL - 3. Call PATCH /tables/files/{file_id} with status='uploaded' - 4. Backend validates file and calculates metrics - 5. File ready for ingestion - - **Supported Formats:** - - Parquet (`application/x-parquet` with `.parquet` extension) - - CSV (`text/csv` with `.csv` extension) - - JSON (`application/json` with `.json` extension) - - **Validation:** - - File extension must match content type - - File name 1-255 characters - - No path traversal characters (.. / \) - - Auto-creates table if it doesn't exist - - **Auto-Table Creation:** - Tables are automatically created on first file upload with type inferred from name - (e.g., \"Transaction\" → relationship) and empty schema populated during ingestion. - - **Subgraph Support:** - This endpoint accepts both parent graph IDs and subgraph IDs. - - Parent graph: Use `graph_id` like `kg0123456789abcdef` - - Subgraph: Use full subgraph ID like `kg0123456789abcdef_dev` - Each subgraph has completely isolated S3 staging areas and tables. Files uploaded - to one subgraph do not appear in other subgraphs. 
- - **Important Notes:** - - Presigned URLs expire (default: 1 hour) - - Use appropriate Content-Type header when uploading to S3 - - File extension must match content type - - Upload URL generation is included - no credit consumption - - Args: - graph_id (str): - table_name (str): Table name - body (FileUploadRequest): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, ErrorResponse, FileUploadResponse, HTTPValidationError]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - table_name=table_name, - body=body, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - graph_id: str, - table_name: str, - *, - client: AuthenticatedClient, - body: FileUploadRequest, -) -> Optional[Union[Any, ErrorResponse, FileUploadResponse, HTTPValidationError]]: - r"""Get File Upload URL - - Generate a presigned S3 URL for secure file upload. - - Initiates file upload to a staging table by generating a secure, time-limited - presigned S3 URL. Files are uploaded directly to S3, bypassing the API for - optimal performance. - - **Upload Workflow:** - 1. Call this endpoint to get presigned URL - 2. PUT file directly to S3 URL - 3. Call PATCH /tables/files/{file_id} with status='uploaded' - 4. Backend validates file and calculates metrics - 5. File ready for ingestion - - **Supported Formats:** - - Parquet (`application/x-parquet` with `.parquet` extension) - - CSV (`text/csv` with `.csv` extension) - - JSON (`application/json` with `.json` extension) - - **Validation:** - - File extension must match content type - - File name 1-255 characters - - No path traversal characters (.. 
/ \) - - Auto-creates table if it doesn't exist - - **Auto-Table Creation:** - Tables are automatically created on first file upload with type inferred from name - (e.g., \"Transaction\" → relationship) and empty schema populated during ingestion. - - **Subgraph Support:** - This endpoint accepts both parent graph IDs and subgraph IDs. - - Parent graph: Use `graph_id` like `kg0123456789abcdef` - - Subgraph: Use full subgraph ID like `kg0123456789abcdef_dev` - Each subgraph has completely isolated S3 staging areas and tables. Files uploaded - to one subgraph do not appear in other subgraphs. - - **Important Notes:** - - Presigned URLs expire (default: 1 hour) - - Use appropriate Content-Type header when uploading to S3 - - File extension must match content type - - Upload URL generation is included - no credit consumption - - Args: - graph_id (str): - table_name (str): Table name - body (FileUploadRequest): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, ErrorResponse, FileUploadResponse, HTTPValidationError] - """ - - return sync_detailed( - graph_id=graph_id, - table_name=table_name, - client=client, - body=body, - ).parsed - - -async def asyncio_detailed( - graph_id: str, - table_name: str, - *, - client: AuthenticatedClient, - body: FileUploadRequest, -) -> Response[Union[Any, ErrorResponse, FileUploadResponse, HTTPValidationError]]: - r"""Get File Upload URL - - Generate a presigned S3 URL for secure file upload. - - Initiates file upload to a staging table by generating a secure, time-limited - presigned S3 URL. Files are uploaded directly to S3, bypassing the API for - optimal performance. - - **Upload Workflow:** - 1. Call this endpoint to get presigned URL - 2. PUT file directly to S3 URL - 3. Call PATCH /tables/files/{file_id} with status='uploaded' - 4. 
Backend validates file and calculates metrics - 5. File ready for ingestion - - **Supported Formats:** - - Parquet (`application/x-parquet` with `.parquet` extension) - - CSV (`text/csv` with `.csv` extension) - - JSON (`application/json` with `.json` extension) - - **Validation:** - - File extension must match content type - - File name 1-255 characters - - No path traversal characters (.. / \) - - Auto-creates table if it doesn't exist - - **Auto-Table Creation:** - Tables are automatically created on first file upload with type inferred from name - (e.g., \"Transaction\" → relationship) and empty schema populated during ingestion. - - **Subgraph Support:** - This endpoint accepts both parent graph IDs and subgraph IDs. - - Parent graph: Use `graph_id` like `kg0123456789abcdef` - - Subgraph: Use full subgraph ID like `kg0123456789abcdef_dev` - Each subgraph has completely isolated S3 staging areas and tables. Files uploaded - to one subgraph do not appear in other subgraphs. - - **Important Notes:** - - Presigned URLs expire (default: 1 hour) - - Use appropriate Content-Type header when uploading to S3 - - File extension must match content type - - Upload URL generation is included - no credit consumption - - Args: - graph_id (str): - table_name (str): Table name - body (FileUploadRequest): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Response[Union[Any, ErrorResponse, FileUploadResponse, HTTPValidationError]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - table_name=table_name, - body=body, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - graph_id: str, - table_name: str, - *, - client: AuthenticatedClient, - body: FileUploadRequest, -) -> Optional[Union[Any, ErrorResponse, FileUploadResponse, HTTPValidationError]]: - r"""Get File Upload URL - - Generate a presigned S3 URL for secure file upload. - - Initiates file upload to a staging table by generating a secure, time-limited - presigned S3 URL. Files are uploaded directly to S3, bypassing the API for - optimal performance. - - **Upload Workflow:** - 1. Call this endpoint to get presigned URL - 2. PUT file directly to S3 URL - 3. Call PATCH /tables/files/{file_id} with status='uploaded' - 4. Backend validates file and calculates metrics - 5. File ready for ingestion - - **Supported Formats:** - - Parquet (`application/x-parquet` with `.parquet` extension) - - CSV (`text/csv` with `.csv` extension) - - JSON (`application/json` with `.json` extension) - - **Validation:** - - File extension must match content type - - File name 1-255 characters - - No path traversal characters (.. / \) - - Auto-creates table if it doesn't exist - - **Auto-Table Creation:** - Tables are automatically created on first file upload with type inferred from name - (e.g., \"Transaction\" → relationship) and empty schema populated during ingestion. - - **Subgraph Support:** - This endpoint accepts both parent graph IDs and subgraph IDs. - - Parent graph: Use `graph_id` like `kg0123456789abcdef` - - Subgraph: Use full subgraph ID like `kg0123456789abcdef_dev` - Each subgraph has completely isolated S3 staging areas and tables. Files uploaded - to one subgraph do not appear in other subgraphs. 
- - **Important Notes:** - - Presigned URLs expire (default: 1 hour) - - Use appropriate Content-Type header when uploading to S3 - - File extension must match content type - - Upload URL generation is included - no credit consumption - - Args: - graph_id (str): - table_name (str): Table name - body (FileUploadRequest): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, ErrorResponse, FileUploadResponse, HTTPValidationError] - """ - - return ( - await asyncio_detailed( - graph_id=graph_id, - table_name=table_name, - client=client, - body=body, - ) - ).parsed diff --git a/robosystems_client/api/tables/ingest_tables.py b/robosystems_client/api/tables/ingest_tables.py deleted file mode 100644 index 9d1d2a1..0000000 --- a/robosystems_client/api/tables/ingest_tables.py +++ /dev/null @@ -1,456 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... 
import errors -from ...client import AuthenticatedClient, Client -from ...models.bulk_ingest_request import BulkIngestRequest -from ...models.bulk_ingest_response import BulkIngestResponse -from ...models.error_response import ErrorResponse -from ...models.http_validation_error import HTTPValidationError -from ...types import Response - - -def _get_kwargs( - graph_id: str, - *, - body: BulkIngestRequest, -) -> dict[str, Any]: - headers: dict[str, Any] = {} - - _kwargs: dict[str, Any] = { - "method": "post", - "url": f"/v1/graphs/{graph_id}/tables/ingest", - } - - _kwargs["json"] = body.to_dict() - - headers["Content-Type"] = "application/json" - - _kwargs["headers"] = headers - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, BulkIngestResponse, ErrorResponse, HTTPValidationError]]: - if response.status_code == 200: - response_200 = BulkIngestResponse.from_dict(response.json()) - - return response_200 - - if response.status_code == 401: - response_401 = cast(Any, None) - return response_401 - - if response.status_code == 403: - response_403 = ErrorResponse.from_dict(response.json()) - - return response_403 - - if response.status_code == 404: - response_404 = ErrorResponse.from_dict(response.json()) - - return response_404 - - if response.status_code == 409: - response_409 = ErrorResponse.from_dict(response.json()) - - return response_409 - - if response.status_code == 422: - response_422 = HTTPValidationError.from_dict(response.json()) - - return response_422 - - if response.status_code == 500: - response_500 = ErrorResponse.from_dict(response.json()) - - return response_500 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, BulkIngestResponse, ErrorResponse, 
HTTPValidationError]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - graph_id: str, - *, - client: AuthenticatedClient, - body: BulkIngestRequest, -) -> Response[Union[Any, BulkIngestResponse, ErrorResponse, HTTPValidationError]]: - """Ingest Tables to Graph - - Load all files from S3 into DuckDB staging tables and ingest into Kuzu graph database. - - Orchestrates the complete data pipeline from S3 staging files into the Kuzu graph database. - Processes all tables in a single bulk operation with comprehensive error handling and metrics. - - **Use Cases:** - - Initial graph population from uploaded data - - Incremental data updates with new files - - Complete database rebuild from source files - - Recovery from failed ingestion attempts - - **Workflow:** - 1. Upload data files via `POST /tables/{table_name}/files` - 2. Files are validated and marked as 'uploaded' - 3. Trigger ingestion: `POST /tables/ingest` - 4. DuckDB staging tables created from S3 patterns - 5. Data copied from DuckDB to Kuzu - 6. 
Per-table results and metrics returned - - **Rebuild Feature:** - Setting `rebuild=true` regenerates the entire graph database from scratch: - - Deletes existing Kuzu database - - Recreates with fresh schema from active GraphSchema - - Ingests all data files - - Safe operation - S3 is source of truth - - Useful for schema changes or data corrections - - Graph marked as 'rebuilding' during process - - **Error Handling:** - - Per-table error isolation with `ignore_errors` flag - - Partial success support (some tables succeed, some fail) - - Detailed error reporting per table - - Graph status tracking throughout process - - Automatic failure recovery and cleanup - - **Performance:** - - Processes all tables in sequence - - Each table timed independently - - Total execution metrics provided - - Scales to thousands of files - - Optimized for large datasets - - **Concurrency Control:** - Only one ingestion can run per graph at a time. If another ingestion is in progress, - you'll receive a 409 Conflict error. The distributed lock automatically expires after - the configured TTL (default: 1 hour) to prevent deadlocks from failed ingestions. - - **Subgraph Support:** - This endpoint accepts both parent graph IDs and subgraph IDs. - - Parent graph: Use `graph_id` like `kg0123456789abcdef` - - Subgraph: Use full subgraph ID like `kg0123456789abcdef_dev` - Each subgraph has independent staging tables and graph data. Ingestion operates - on the specified graph/subgraph only and does not affect other subgraphs. 
- - **Important Notes:** - - Only files with 'uploaded' status are processed - - Tables with no uploaded files are skipped - - Use `ignore_errors=false` for strict validation - - Monitor progress via per-table results - - Check graph metadata for rebuild status - - Wait for current ingestion to complete before starting another - - Table ingestion is included - no credit consumption - - Args: - graph_id (str): - body (BulkIngestRequest): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, BulkIngestResponse, ErrorResponse, HTTPValidationError]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - body=body, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - graph_id: str, - *, - client: AuthenticatedClient, - body: BulkIngestRequest, -) -> Optional[Union[Any, BulkIngestResponse, ErrorResponse, HTTPValidationError]]: - """Ingest Tables to Graph - - Load all files from S3 into DuckDB staging tables and ingest into Kuzu graph database. - - Orchestrates the complete data pipeline from S3 staging files into the Kuzu graph database. - Processes all tables in a single bulk operation with comprehensive error handling and metrics. - - **Use Cases:** - - Initial graph population from uploaded data - - Incremental data updates with new files - - Complete database rebuild from source files - - Recovery from failed ingestion attempts - - **Workflow:** - 1. Upload data files via `POST /tables/{table_name}/files` - 2. Files are validated and marked as 'uploaded' - 3. Trigger ingestion: `POST /tables/ingest` - 4. DuckDB staging tables created from S3 patterns - 5. Data copied from DuckDB to Kuzu - 6. 
Per-table results and metrics returned - - **Rebuild Feature:** - Setting `rebuild=true` regenerates the entire graph database from scratch: - - Deletes existing Kuzu database - - Recreates with fresh schema from active GraphSchema - - Ingests all data files - - Safe operation - S3 is source of truth - - Useful for schema changes or data corrections - - Graph marked as 'rebuilding' during process - - **Error Handling:** - - Per-table error isolation with `ignore_errors` flag - - Partial success support (some tables succeed, some fail) - - Detailed error reporting per table - - Graph status tracking throughout process - - Automatic failure recovery and cleanup - - **Performance:** - - Processes all tables in sequence - - Each table timed independently - - Total execution metrics provided - - Scales to thousands of files - - Optimized for large datasets - - **Concurrency Control:** - Only one ingestion can run per graph at a time. If another ingestion is in progress, - you'll receive a 409 Conflict error. The distributed lock automatically expires after - the configured TTL (default: 1 hour) to prevent deadlocks from failed ingestions. - - **Subgraph Support:** - This endpoint accepts both parent graph IDs and subgraph IDs. - - Parent graph: Use `graph_id` like `kg0123456789abcdef` - - Subgraph: Use full subgraph ID like `kg0123456789abcdef_dev` - Each subgraph has independent staging tables and graph data. Ingestion operates - on the specified graph/subgraph only and does not affect other subgraphs. 
- - **Important Notes:** - - Only files with 'uploaded' status are processed - - Tables with no uploaded files are skipped - - Use `ignore_errors=false` for strict validation - - Monitor progress via per-table results - - Check graph metadata for rebuild status - - Wait for current ingestion to complete before starting another - - Table ingestion is included - no credit consumption - - Args: - graph_id (str): - body (BulkIngestRequest): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, BulkIngestResponse, ErrorResponse, HTTPValidationError] - """ - - return sync_detailed( - graph_id=graph_id, - client=client, - body=body, - ).parsed - - -async def asyncio_detailed( - graph_id: str, - *, - client: AuthenticatedClient, - body: BulkIngestRequest, -) -> Response[Union[Any, BulkIngestResponse, ErrorResponse, HTTPValidationError]]: - """Ingest Tables to Graph - - Load all files from S3 into DuckDB staging tables and ingest into Kuzu graph database. - - Orchestrates the complete data pipeline from S3 staging files into the Kuzu graph database. - Processes all tables in a single bulk operation with comprehensive error handling and metrics. - - **Use Cases:** - - Initial graph population from uploaded data - - Incremental data updates with new files - - Complete database rebuild from source files - - Recovery from failed ingestion attempts - - **Workflow:** - 1. Upload data files via `POST /tables/{table_name}/files` - 2. Files are validated and marked as 'uploaded' - 3. Trigger ingestion: `POST /tables/ingest` - 4. DuckDB staging tables created from S3 patterns - 5. Data copied from DuckDB to Kuzu - 6. 
Per-table results and metrics returned - - **Rebuild Feature:** - Setting `rebuild=true` regenerates the entire graph database from scratch: - - Deletes existing Kuzu database - - Recreates with fresh schema from active GraphSchema - - Ingests all data files - - Safe operation - S3 is source of truth - - Useful for schema changes or data corrections - - Graph marked as 'rebuilding' during process - - **Error Handling:** - - Per-table error isolation with `ignore_errors` flag - - Partial success support (some tables succeed, some fail) - - Detailed error reporting per table - - Graph status tracking throughout process - - Automatic failure recovery and cleanup - - **Performance:** - - Processes all tables in sequence - - Each table timed independently - - Total execution metrics provided - - Scales to thousands of files - - Optimized for large datasets - - **Concurrency Control:** - Only one ingestion can run per graph at a time. If another ingestion is in progress, - you'll receive a 409 Conflict error. The distributed lock automatically expires after - the configured TTL (default: 1 hour) to prevent deadlocks from failed ingestions. - - **Subgraph Support:** - This endpoint accepts both parent graph IDs and subgraph IDs. - - Parent graph: Use `graph_id` like `kg0123456789abcdef` - - Subgraph: Use full subgraph ID like `kg0123456789abcdef_dev` - Each subgraph has independent staging tables and graph data. Ingestion operates - on the specified graph/subgraph only and does not affect other subgraphs. 
- - **Important Notes:** - - Only files with 'uploaded' status are processed - - Tables with no uploaded files are skipped - - Use `ignore_errors=false` for strict validation - - Monitor progress via per-table results - - Check graph metadata for rebuild status - - Wait for current ingestion to complete before starting another - - Table ingestion is included - no credit consumption - - Args: - graph_id (str): - body (BulkIngestRequest): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, BulkIngestResponse, ErrorResponse, HTTPValidationError]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - body=body, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - graph_id: str, - *, - client: AuthenticatedClient, - body: BulkIngestRequest, -) -> Optional[Union[Any, BulkIngestResponse, ErrorResponse, HTTPValidationError]]: - """Ingest Tables to Graph - - Load all files from S3 into DuckDB staging tables and ingest into Kuzu graph database. - - Orchestrates the complete data pipeline from S3 staging files into the Kuzu graph database. - Processes all tables in a single bulk operation with comprehensive error handling and metrics. - - **Use Cases:** - - Initial graph population from uploaded data - - Incremental data updates with new files - - Complete database rebuild from source files - - Recovery from failed ingestion attempts - - **Workflow:** - 1. Upload data files via `POST /tables/{table_name}/files` - 2. Files are validated and marked as 'uploaded' - 3. Trigger ingestion: `POST /tables/ingest` - 4. DuckDB staging tables created from S3 patterns - 5. Data copied from DuckDB to Kuzu - 6. 
Per-table results and metrics returned - - **Rebuild Feature:** - Setting `rebuild=true` regenerates the entire graph database from scratch: - - Deletes existing Kuzu database - - Recreates with fresh schema from active GraphSchema - - Ingests all data files - - Safe operation - S3 is source of truth - - Useful for schema changes or data corrections - - Graph marked as 'rebuilding' during process - - **Error Handling:** - - Per-table error isolation with `ignore_errors` flag - - Partial success support (some tables succeed, some fail) - - Detailed error reporting per table - - Graph status tracking throughout process - - Automatic failure recovery and cleanup - - **Performance:** - - Processes all tables in sequence - - Each table timed independently - - Total execution metrics provided - - Scales to thousands of files - - Optimized for large datasets - - **Concurrency Control:** - Only one ingestion can run per graph at a time. If another ingestion is in progress, - you'll receive a 409 Conflict error. The distributed lock automatically expires after - the configured TTL (default: 1 hour) to prevent deadlocks from failed ingestions. - - **Subgraph Support:** - This endpoint accepts both parent graph IDs and subgraph IDs. - - Parent graph: Use `graph_id` like `kg0123456789abcdef` - - Subgraph: Use full subgraph ID like `kg0123456789abcdef_dev` - Each subgraph has independent staging tables and graph data. Ingestion operates - on the specified graph/subgraph only and does not affect other subgraphs. 
- - **Important Notes:** - - Only files with 'uploaded' status are processed - - Tables with no uploaded files are skipped - - Use `ignore_errors=false` for strict validation - - Monitor progress via per-table results - - Check graph metadata for rebuild status - - Wait for current ingestion to complete before starting another - - Table ingestion is included - no credit consumption - - Args: - graph_id (str): - body (BulkIngestRequest): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, BulkIngestResponse, ErrorResponse, HTTPValidationError] - """ - - return ( - await asyncio_detailed( - graph_id=graph_id, - client=client, - body=body, - ) - ).parsed diff --git a/robosystems_client/api/tables/list_table_files.py b/robosystems_client/api/tables/list_table_files.py deleted file mode 100644 index 95a95cb..0000000 --- a/robosystems_client/api/tables/list_table_files.py +++ /dev/null @@ -1,329 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... 
import errors -from ...client import AuthenticatedClient, Client -from ...models.error_response import ErrorResponse -from ...models.http_validation_error import HTTPValidationError -from ...models.list_table_files_response import ListTableFilesResponse -from ...types import Response - - -def _get_kwargs( - graph_id: str, - table_name: str, -) -> dict[str, Any]: - _kwargs: dict[str, Any] = { - "method": "get", - "url": f"/v1/graphs/{graph_id}/tables/{table_name}/files", - } - - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError, ListTableFilesResponse]]: - if response.status_code == 200: - response_200 = ListTableFilesResponse.from_dict(response.json()) - - return response_200 - - if response.status_code == 401: - response_401 = cast(Any, None) - return response_401 - - if response.status_code == 403: - response_403 = ErrorResponse.from_dict(response.json()) - - return response_403 - - if response.status_code == 404: - response_404 = ErrorResponse.from_dict(response.json()) - - return response_404 - - if response.status_code == 422: - response_422 = HTTPValidationError.from_dict(response.json()) - - return response_422 - - if response.status_code == 500: - response_500 = cast(Any, None) - return response_500 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ErrorResponse, HTTPValidationError, ListTableFilesResponse]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - graph_id: str, - table_name: str, - *, - client: AuthenticatedClient, -) -> Response[Union[Any, 
ErrorResponse, HTTPValidationError, ListTableFilesResponse]]: - """List Files in Staging Table - - List all files uploaded to a staging table with comprehensive metadata. - - Get a complete inventory of all files in a staging table, including upload status, - file sizes, row counts, and S3 locations. Essential for monitoring upload progress - and validating data before ingestion. - - **Use Cases:** - - Monitor file upload progress - - Verify files are ready for ingestion - - Check file formats and sizes - - Track storage usage per table - - Identify failed or incomplete uploads - - Pre-ingestion validation - - **Returned Metadata:** - - File ID, name, and format (parquet, csv, json) - - Size in bytes and row count (if available) - - Upload status and method - - Creation and upload timestamps - - S3 key for reference - - **Upload Status Values:** - - `pending`: Upload URL generated, awaiting upload - - `uploaded`: Successfully uploaded, ready for ingestion - - `disabled`: Excluded from ingestion - - `archived`: Soft deleted - - `failed`: Upload failed - - **Important Notes:** - - Only `uploaded` files are ingested - - Check `row_count` to estimate data volume - - Use `total_size_bytes` for storage monitoring - - Files with `failed` status should be deleted and re-uploaded - - File listing is included - no credit consumption - - Args: - graph_id (str): - table_name (str): Table name - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError, ListTableFilesResponse]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - table_name=table_name, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - graph_id: str, - table_name: str, - *, - client: AuthenticatedClient, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError, ListTableFilesResponse]]: - """List Files in Staging Table - - List all files uploaded to a staging table with comprehensive metadata. - - Get a complete inventory of all files in a staging table, including upload status, - file sizes, row counts, and S3 locations. Essential for monitoring upload progress - and validating data before ingestion. - - **Use Cases:** - - Monitor file upload progress - - Verify files are ready for ingestion - - Check file formats and sizes - - Track storage usage per table - - Identify failed or incomplete uploads - - Pre-ingestion validation - - **Returned Metadata:** - - File ID, name, and format (parquet, csv, json) - - Size in bytes and row count (if available) - - Upload status and method - - Creation and upload timestamps - - S3 key for reference - - **Upload Status Values:** - - `pending`: Upload URL generated, awaiting upload - - `uploaded`: Successfully uploaded, ready for ingestion - - `disabled`: Excluded from ingestion - - `archived`: Soft deleted - - `failed`: Upload failed - - **Important Notes:** - - Only `uploaded` files are ingested - - Check `row_count` to estimate data volume - - Use `total_size_bytes` for storage monitoring - - Files with `failed` status should be deleted and re-uploaded - - File listing is included - no credit consumption - - Args: - graph_id (str): - table_name (str): Table name - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
- httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, ErrorResponse, HTTPValidationError, ListTableFilesResponse] - """ - - return sync_detailed( - graph_id=graph_id, - table_name=table_name, - client=client, - ).parsed - - -async def asyncio_detailed( - graph_id: str, - table_name: str, - *, - client: AuthenticatedClient, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError, ListTableFilesResponse]]: - """List Files in Staging Table - - List all files uploaded to a staging table with comprehensive metadata. - - Get a complete inventory of all files in a staging table, including upload status, - file sizes, row counts, and S3 locations. Essential for monitoring upload progress - and validating data before ingestion. - - **Use Cases:** - - Monitor file upload progress - - Verify files are ready for ingestion - - Check file formats and sizes - - Track storage usage per table - - Identify failed or incomplete uploads - - Pre-ingestion validation - - **Returned Metadata:** - - File ID, name, and format (parquet, csv, json) - - Size in bytes and row count (if available) - - Upload status and method - - Creation and upload timestamps - - S3 key for reference - - **Upload Status Values:** - - `pending`: Upload URL generated, awaiting upload - - `uploaded`: Successfully uploaded, ready for ingestion - - `disabled`: Excluded from ingestion - - `archived`: Soft deleted - - `failed`: Upload failed - - **Important Notes:** - - Only `uploaded` files are ingested - - Check `row_count` to estimate data volume - - Use `total_size_bytes` for storage monitoring - - Files with `failed` status should be deleted and re-uploaded - - File listing is included - no credit consumption - - Args: - graph_id (str): - table_name (str): Table name - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
- httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError, ListTableFilesResponse]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - table_name=table_name, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - graph_id: str, - table_name: str, - *, - client: AuthenticatedClient, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError, ListTableFilesResponse]]: - """List Files in Staging Table - - List all files uploaded to a staging table with comprehensive metadata. - - Get a complete inventory of all files in a staging table, including upload status, - file sizes, row counts, and S3 locations. Essential for monitoring upload progress - and validating data before ingestion. - - **Use Cases:** - - Monitor file upload progress - - Verify files are ready for ingestion - - Check file formats and sizes - - Track storage usage per table - - Identify failed or incomplete uploads - - Pre-ingestion validation - - **Returned Metadata:** - - File ID, name, and format (parquet, csv, json) - - Size in bytes and row count (if available) - - Upload status and method - - Creation and upload timestamps - - S3 key for reference - - **Upload Status Values:** - - `pending`: Upload URL generated, awaiting upload - - `uploaded`: Successfully uploaded, ready for ingestion - - `disabled`: Excluded from ingestion - - `archived`: Soft deleted - - `failed`: Upload failed - - **Important Notes:** - - Only `uploaded` files are ingested - - Check `row_count` to estimate data volume - - Use `total_size_bytes` for storage monitoring - - Files with `failed` status should be deleted and re-uploaded - - File listing is included - no credit consumption - - Args: - graph_id (str): - table_name (str): Table name - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code 
and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, ErrorResponse, HTTPValidationError, ListTableFilesResponse] - """ - - return ( - await asyncio_detailed( - graph_id=graph_id, - table_name=table_name, - client=client, - ) - ).parsed diff --git a/robosystems_client/api/tables/list_tables.py b/robosystems_client/api/tables/list_tables.py index 0aeb634..18990bd 100644 --- a/robosystems_client/api/tables/list_tables.py +++ b/robosystems_client/api/tables/list_tables.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError, TableListResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | HTTPValidationError | TableListResponse | None: if response.status_code == 200: response_200 = TableListResponse.from_dict(response.json()) @@ -60,8 +60,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ErrorResponse, HTTPValidationError, TableListResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ErrorResponse | HTTPValidationError | TableListResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -74,7 +74,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError, TableListResponse]]: +) -> Response[Any | ErrorResponse | HTTPValidationError | TableListResponse]: """List Staging Tables List all DuckDB staging tables with comprehensive metrics and status. 
@@ -120,7 +120,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError, TableListResponse]] + Response[Any | ErrorResponse | HTTPValidationError | TableListResponse] """ kwargs = _get_kwargs( @@ -138,7 +138,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError, TableListResponse]]: +) -> Any | ErrorResponse | HTTPValidationError | TableListResponse | None: """List Staging Tables List all DuckDB staging tables with comprehensive metrics and status. @@ -184,7 +184,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ErrorResponse, HTTPValidationError, TableListResponse] + Any | ErrorResponse | HTTPValidationError | TableListResponse """ return sync_detailed( @@ -197,7 +197,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError, TableListResponse]]: +) -> Response[Any | ErrorResponse | HTTPValidationError | TableListResponse]: """List Staging Tables List all DuckDB staging tables with comprehensive metrics and status. @@ -243,7 +243,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError, TableListResponse]] + Response[Any | ErrorResponse | HTTPValidationError | TableListResponse] """ kwargs = _get_kwargs( @@ -259,7 +259,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError, TableListResponse]]: +) -> Any | ErrorResponse | HTTPValidationError | TableListResponse | None: """List Staging Tables List all DuckDB staging tables with comprehensive metrics and status. 
@@ -305,7 +305,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ErrorResponse, HTTPValidationError, TableListResponse] + Any | ErrorResponse | HTTPValidationError | TableListResponse """ return ( diff --git a/robosystems_client/api/tables/query_tables.py b/robosystems_client/api/tables/query_tables.py index 69aa1c0..23044d3 100644 --- a/robosystems_client/api/tables/query_tables.py +++ b/robosystems_client/api/tables/query_tables.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union, cast +from typing import Any, cast import httpx @@ -33,8 +33,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError, TableQueryResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | ErrorResponse | HTTPValidationError | TableQueryResponse | None: if response.status_code == 200: response_200 = TableQueryResponse.from_dict(response.json()) @@ -79,8 +79,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, ErrorResponse, HTTPValidationError, TableQueryResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | ErrorResponse | HTTPValidationError | TableQueryResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -94,7 +94,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: TableQueryRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError, TableQueryResponse]]: +) -> Response[Any | ErrorResponse | HTTPValidationError | TableQueryResponse]: r"""Query Staging Tables with SQL Execute SQL queries on DuckDB staging tables for data inspection and validation. 
@@ -168,7 +168,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError, TableQueryResponse]] + Response[Any | ErrorResponse | HTTPValidationError | TableQueryResponse] """ kwargs = _get_kwargs( @@ -188,7 +188,7 @@ def sync( *, client: AuthenticatedClient, body: TableQueryRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError, TableQueryResponse]]: +) -> Any | ErrorResponse | HTTPValidationError | TableQueryResponse | None: r"""Query Staging Tables with SQL Execute SQL queries on DuckDB staging tables for data inspection and validation. @@ -262,7 +262,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ErrorResponse, HTTPValidationError, TableQueryResponse] + Any | ErrorResponse | HTTPValidationError | TableQueryResponse """ return sync_detailed( @@ -277,7 +277,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: TableQueryRequest, -) -> Response[Union[Any, ErrorResponse, HTTPValidationError, TableQueryResponse]]: +) -> Response[Any | ErrorResponse | HTTPValidationError | TableQueryResponse]: r"""Query Staging Tables with SQL Execute SQL queries on DuckDB staging tables for data inspection and validation. @@ -351,7 +351,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError, TableQueryResponse]] + Response[Any | ErrorResponse | HTTPValidationError | TableQueryResponse] """ kwargs = _get_kwargs( @@ -369,7 +369,7 @@ async def asyncio( *, client: AuthenticatedClient, body: TableQueryRequest, -) -> Optional[Union[Any, ErrorResponse, HTTPValidationError, TableQueryResponse]]: +) -> Any | ErrorResponse | HTTPValidationError | TableQueryResponse | None: r"""Query Staging Tables with SQL Execute SQL queries on DuckDB staging tables for data inspection and validation. @@ -443,7 +443,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, ErrorResponse, HTTPValidationError, TableQueryResponse] + Any | ErrorResponse | HTTPValidationError | TableQueryResponse """ return ( diff --git a/robosystems_client/api/tables/update_file_status.py b/robosystems_client/api/tables/update_file_status.py deleted file mode 100644 index ef41761..0000000 --- a/robosystems_client/api/tables/update_file_status.py +++ /dev/null @@ -1,395 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... 
import errors -from ...client import AuthenticatedClient, Client -from ...models.error_response import ErrorResponse -from ...models.file_status_update import FileStatusUpdate -from ...models.http_validation_error import HTTPValidationError -from ...models.update_file_status_response_updatefilestatus import ( - UpdateFileStatusResponseUpdatefilestatus, -) -from ...types import Response - - -def _get_kwargs( - graph_id: str, - file_id: str, - *, - body: FileStatusUpdate, -) -> dict[str, Any]: - headers: dict[str, Any] = {} - - _kwargs: dict[str, Any] = { - "method": "patch", - "url": f"/v1/graphs/{graph_id}/tables/files/{file_id}", - } - - _kwargs["json"] = body.to_dict() - - headers["Content-Type"] = "application/json" - - _kwargs["headers"] = headers - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[ - Union[ - Any, ErrorResponse, HTTPValidationError, UpdateFileStatusResponseUpdatefilestatus - ] -]: - if response.status_code == 200: - response_200 = UpdateFileStatusResponseUpdatefilestatus.from_dict(response.json()) - - return response_200 - - if response.status_code == 400: - response_400 = ErrorResponse.from_dict(response.json()) - - return response_400 - - if response.status_code == 401: - response_401 = cast(Any, None) - return response_401 - - if response.status_code == 403: - response_403 = ErrorResponse.from_dict(response.json()) - - return response_403 - - if response.status_code == 404: - response_404 = ErrorResponse.from_dict(response.json()) - - return response_404 - - if response.status_code == 413: - response_413 = ErrorResponse.from_dict(response.json()) - - return response_413 - - if response.status_code == 422: - response_422 = HTTPValidationError.from_dict(response.json()) - - return response_422 - - if response.status_code == 500: - response_500 = cast(Any, None) - return response_500 - - if client.raise_on_unexpected_status: - raise 
errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[ - Union[ - Any, ErrorResponse, HTTPValidationError, UpdateFileStatusResponseUpdatefilestatus - ] -]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, - body: FileStatusUpdate, -) -> Response[ - Union[ - Any, ErrorResponse, HTTPValidationError, UpdateFileStatusResponseUpdatefilestatus - ] -]: - """Update File Upload Status - - Update file status after upload completes. - - Marks files as uploaded after successful S3 upload. The backend validates - the file, calculates size and row count, enforces storage limits, and - registers the DuckDB table for queries. - - **Status Values:** - - `uploaded`: File successfully uploaded to S3 (triggers validation) - - `disabled`: Exclude file from ingestion - - `archived`: Soft delete file - - **What Happens on 'uploaded' Status:** - 1. Verify file exists in S3 - 2. Calculate actual file size - 3. Enforce tier storage limits - 4. Calculate or estimate row count - 5. Update table statistics - 6. Register DuckDB external table - 7. File ready for ingestion - - **Row Count Calculation:** - - **Parquet**: Exact count from file metadata - - **CSV**: Count rows (minus header) - - **JSON**: Count array elements - - **Fallback**: Estimate from file size if reading fails - - **Storage Limits:** - Enforced per subscription tier. Returns HTTP 413 if limit exceeded. - Check current usage before large uploads. 
- - **Important Notes:** - - Always call this after S3 upload completes - - Check response for actual row count - - Storage limit errors (413) mean tier upgrade needed - - DuckDB registration failures are non-fatal (retried later) - - Status updates are included - no credit consumption - - Args: - graph_id (str): - file_id (str): File identifier - body (FileStatusUpdate): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError, UpdateFileStatusResponseUpdatefilestatus]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - file_id=file_id, - body=body, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, - body: FileStatusUpdate, -) -> Optional[ - Union[ - Any, ErrorResponse, HTTPValidationError, UpdateFileStatusResponseUpdatefilestatus - ] -]: - """Update File Upload Status - - Update file status after upload completes. - - Marks files as uploaded after successful S3 upload. The backend validates - the file, calculates size and row count, enforces storage limits, and - registers the DuckDB table for queries. - - **Status Values:** - - `uploaded`: File successfully uploaded to S3 (triggers validation) - - `disabled`: Exclude file from ingestion - - `archived`: Soft delete file - - **What Happens on 'uploaded' Status:** - 1. Verify file exists in S3 - 2. Calculate actual file size - 3. Enforce tier storage limits - 4. Calculate or estimate row count - 5. Update table statistics - 6. Register DuckDB external table - 7. 
File ready for ingestion - - **Row Count Calculation:** - - **Parquet**: Exact count from file metadata - - **CSV**: Count rows (minus header) - - **JSON**: Count array elements - - **Fallback**: Estimate from file size if reading fails - - **Storage Limits:** - Enforced per subscription tier. Returns HTTP 413 if limit exceeded. - Check current usage before large uploads. - - **Important Notes:** - - Always call this after S3 upload completes - - Check response for actual row count - - Storage limit errors (413) mean tier upgrade needed - - DuckDB registration failures are non-fatal (retried later) - - Status updates are included - no credit consumption - - Args: - graph_id (str): - file_id (str): File identifier - body (FileStatusUpdate): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, ErrorResponse, HTTPValidationError, UpdateFileStatusResponseUpdatefilestatus] - """ - - return sync_detailed( - graph_id=graph_id, - file_id=file_id, - client=client, - body=body, - ).parsed - - -async def asyncio_detailed( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, - body: FileStatusUpdate, -) -> Response[ - Union[ - Any, ErrorResponse, HTTPValidationError, UpdateFileStatusResponseUpdatefilestatus - ] -]: - """Update File Upload Status - - Update file status after upload completes. - - Marks files as uploaded after successful S3 upload. The backend validates - the file, calculates size and row count, enforces storage limits, and - registers the DuckDB table for queries. - - **Status Values:** - - `uploaded`: File successfully uploaded to S3 (triggers validation) - - `disabled`: Exclude file from ingestion - - `archived`: Soft delete file - - **What Happens on 'uploaded' Status:** - 1. Verify file exists in S3 - 2. Calculate actual file size - 3. 
Enforce tier storage limits - 4. Calculate or estimate row count - 5. Update table statistics - 6. Register DuckDB external table - 7. File ready for ingestion - - **Row Count Calculation:** - - **Parquet**: Exact count from file metadata - - **CSV**: Count rows (minus header) - - **JSON**: Count array elements - - **Fallback**: Estimate from file size if reading fails - - **Storage Limits:** - Enforced per subscription tier. Returns HTTP 413 if limit exceeded. - Check current usage before large uploads. - - **Important Notes:** - - Always call this after S3 upload completes - - Check response for actual row count - - Storage limit errors (413) mean tier upgrade needed - - DuckDB registration failures are non-fatal (retried later) - - Status updates are included - no credit consumption - - Args: - graph_id (str): - file_id (str): File identifier - body (FileStatusUpdate): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, ErrorResponse, HTTPValidationError, UpdateFileStatusResponseUpdatefilestatus]] - """ - - kwargs = _get_kwargs( - graph_id=graph_id, - file_id=file_id, - body=body, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - graph_id: str, - file_id: str, - *, - client: AuthenticatedClient, - body: FileStatusUpdate, -) -> Optional[ - Union[ - Any, ErrorResponse, HTTPValidationError, UpdateFileStatusResponseUpdatefilestatus - ] -]: - """Update File Upload Status - - Update file status after upload completes. - - Marks files as uploaded after successful S3 upload. The backend validates - the file, calculates size and row count, enforces storage limits, and - registers the DuckDB table for queries. 
- - **Status Values:** - - `uploaded`: File successfully uploaded to S3 (triggers validation) - - `disabled`: Exclude file from ingestion - - `archived`: Soft delete file - - **What Happens on 'uploaded' Status:** - 1. Verify file exists in S3 - 2. Calculate actual file size - 3. Enforce tier storage limits - 4. Calculate or estimate row count - 5. Update table statistics - 6. Register DuckDB external table - 7. File ready for ingestion - - **Row Count Calculation:** - - **Parquet**: Exact count from file metadata - - **CSV**: Count rows (minus header) - - **JSON**: Count array elements - - **Fallback**: Estimate from file size if reading fails - - **Storage Limits:** - Enforced per subscription tier. Returns HTTP 413 if limit exceeded. - Check current usage before large uploads. - - **Important Notes:** - - Always call this after S3 upload completes - - Check response for actual row count - - Storage limit errors (413) mean tier upgrade needed - - DuckDB registration failures are non-fatal (retried later) - - Status updates are included - no credit consumption - - Args: - graph_id (str): - file_id (str): File identifier - body (FileStatusUpdate): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Union[Any, ErrorResponse, HTTPValidationError, UpdateFileStatusResponseUpdatefilestatus] - """ - - return ( - await asyncio_detailed( - graph_id=graph_id, - file_id=file_id, - client=client, - body=body, - ) - ).parsed diff --git a/robosystems_client/api/usage/get_graph_metrics.py b/robosystems_client/api/usage/get_graph_metrics.py index 682abed..b6264dd 100644 --- a/robosystems_client/api/usage/get_graph_metrics.py +++ b/robosystems_client/api/usage/get_graph_metrics.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, GraphMetricsResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | GraphMetricsResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = GraphMetricsResponse.from_dict(response.json()) @@ -57,8 +57,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, GraphMetricsResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | GraphMetricsResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -71,7 +71,7 @@ def sync_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, GraphMetricsResponse, HTTPValidationError]]: +) -> Response[ErrorResponse | GraphMetricsResponse | HTTPValidationError]: """Get Graph Metrics Get comprehensive metrics for the graph database. @@ -100,7 +100,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, GraphMetricsResponse, HTTPValidationError]] + Response[ErrorResponse | GraphMetricsResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -118,7 +118,7 @@ def sync( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, GraphMetricsResponse, HTTPValidationError]]: +) -> ErrorResponse | GraphMetricsResponse | HTTPValidationError | None: """Get Graph Metrics Get comprehensive metrics for the graph database. @@ -147,7 +147,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, GraphMetricsResponse, HTTPValidationError] + ErrorResponse | GraphMetricsResponse | HTTPValidationError """ return sync_detailed( @@ -160,7 +160,7 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, GraphMetricsResponse, HTTPValidationError]]: +) -> Response[ErrorResponse | GraphMetricsResponse | HTTPValidationError]: """Get Graph Metrics Get comprehensive metrics for the graph database. @@ -189,7 +189,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, GraphMetricsResponse, HTTPValidationError]] + Response[ErrorResponse | GraphMetricsResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -205,7 +205,7 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, GraphMetricsResponse, HTTPValidationError]]: +) -> ErrorResponse | GraphMetricsResponse | HTTPValidationError | None: """Get Graph Metrics Get comprehensive metrics for the graph database. @@ -234,7 +234,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, GraphMetricsResponse, HTTPValidationError] + ErrorResponse | GraphMetricsResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/usage/get_graph_usage_analytics.py b/robosystems_client/api/usage/get_graph_usage_analytics.py index 95a0d39..89e50cb 100644 --- a/robosystems_client/api/usage/get_graph_usage_analytics.py +++ b/robosystems_client/api/usage/get_graph_usage_analytics.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -14,11 +14,11 @@ def _get_kwargs( graph_id: str, *, - time_range: Union[Unset, str] = "30d", - include_storage: Union[Unset, bool] = True, - include_credits: Union[Unset, bool] = True, - include_performance: Union[Unset, bool] = False, - include_events: Union[Unset, bool] = False, + time_range: str | Unset = "30d", + include_storage: bool | Unset = True, + include_credits: bool | Unset = True, + include_performance: bool | Unset = False, + include_events: bool | Unset = False, ) -> dict[str, Any]: params: dict[str, Any] = {} @@ -44,8 +44,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, GraphUsageResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | GraphUsageResponse | HTTPValidationError | None: if response.status_code == 200: response_200 = GraphUsageResponse.from_dict(response.json()) @@ -73,8 +73,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, GraphUsageResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | GraphUsageResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -87,12 +87,12 @@ 
def sync_detailed( graph_id: str, *, client: AuthenticatedClient, - time_range: Union[Unset, str] = "30d", - include_storage: Union[Unset, bool] = True, - include_credits: Union[Unset, bool] = True, - include_performance: Union[Unset, bool] = False, - include_events: Union[Unset, bool] = False, -) -> Response[Union[ErrorResponse, GraphUsageResponse, HTTPValidationError]]: + time_range: str | Unset = "30d", + include_storage: bool | Unset = True, + include_credits: bool | Unset = True, + include_performance: bool | Unset = False, + include_events: bool | Unset = False, +) -> Response[ErrorResponse | GraphUsageResponse | HTTPValidationError]: """Get Graph Usage Analytics Get comprehensive usage analytics tracked by the GraphUsage model. @@ -127,20 +127,20 @@ def sync_detailed( Args: graph_id (str): - time_range (Union[Unset, str]): Time range: 24h, 7d, 30d, current_month, last_month - Default: '30d'. - include_storage (Union[Unset, bool]): Include storage usage summary Default: True. - include_credits (Union[Unset, bool]): Include credit consumption summary Default: True. - include_performance (Union[Unset, bool]): Include performance insights (may be slower) - Default: False. - include_events (Union[Unset, bool]): Include recent usage events Default: False. + time_range (str | Unset): Time range: 24h, 7d, 30d, current_month, last_month Default: + '30d'. + include_storage (bool | Unset): Include storage usage summary Default: True. + include_credits (bool | Unset): Include credit consumption summary Default: True. + include_performance (bool | Unset): Include performance insights (may be slower) Default: + False. + include_events (bool | Unset): Include recent usage events Default: False. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, GraphUsageResponse, HTTPValidationError]] + Response[ErrorResponse | GraphUsageResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -163,12 +163,12 @@ def sync( graph_id: str, *, client: AuthenticatedClient, - time_range: Union[Unset, str] = "30d", - include_storage: Union[Unset, bool] = True, - include_credits: Union[Unset, bool] = True, - include_performance: Union[Unset, bool] = False, - include_events: Union[Unset, bool] = False, -) -> Optional[Union[ErrorResponse, GraphUsageResponse, HTTPValidationError]]: + time_range: str | Unset = "30d", + include_storage: bool | Unset = True, + include_credits: bool | Unset = True, + include_performance: bool | Unset = False, + include_events: bool | Unset = False, +) -> ErrorResponse | GraphUsageResponse | HTTPValidationError | None: """Get Graph Usage Analytics Get comprehensive usage analytics tracked by the GraphUsage model. @@ -203,20 +203,20 @@ def sync( Args: graph_id (str): - time_range (Union[Unset, str]): Time range: 24h, 7d, 30d, current_month, last_month - Default: '30d'. - include_storage (Union[Unset, bool]): Include storage usage summary Default: True. - include_credits (Union[Unset, bool]): Include credit consumption summary Default: True. - include_performance (Union[Unset, bool]): Include performance insights (may be slower) - Default: False. - include_events (Union[Unset, bool]): Include recent usage events Default: False. + time_range (str | Unset): Time range: 24h, 7d, 30d, current_month, last_month Default: + '30d'. + include_storage (bool | Unset): Include storage usage summary Default: True. + include_credits (bool | Unset): Include credit consumption summary Default: True. + include_performance (bool | Unset): Include performance insights (may be slower) Default: + False. + include_events (bool | Unset): Include recent usage events Default: False. 
Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, GraphUsageResponse, HTTPValidationError] + ErrorResponse | GraphUsageResponse | HTTPValidationError """ return sync_detailed( @@ -234,12 +234,12 @@ async def asyncio_detailed( graph_id: str, *, client: AuthenticatedClient, - time_range: Union[Unset, str] = "30d", - include_storage: Union[Unset, bool] = True, - include_credits: Union[Unset, bool] = True, - include_performance: Union[Unset, bool] = False, - include_events: Union[Unset, bool] = False, -) -> Response[Union[ErrorResponse, GraphUsageResponse, HTTPValidationError]]: + time_range: str | Unset = "30d", + include_storage: bool | Unset = True, + include_credits: bool | Unset = True, + include_performance: bool | Unset = False, + include_events: bool | Unset = False, +) -> Response[ErrorResponse | GraphUsageResponse | HTTPValidationError]: """Get Graph Usage Analytics Get comprehensive usage analytics tracked by the GraphUsage model. @@ -274,20 +274,20 @@ async def asyncio_detailed( Args: graph_id (str): - time_range (Union[Unset, str]): Time range: 24h, 7d, 30d, current_month, last_month - Default: '30d'. - include_storage (Union[Unset, bool]): Include storage usage summary Default: True. - include_credits (Union[Unset, bool]): Include credit consumption summary Default: True. - include_performance (Union[Unset, bool]): Include performance insights (may be slower) - Default: False. - include_events (Union[Unset, bool]): Include recent usage events Default: False. + time_range (str | Unset): Time range: 24h, 7d, 30d, current_month, last_month Default: + '30d'. + include_storage (bool | Unset): Include storage usage summary Default: True. + include_credits (bool | Unset): Include credit consumption summary Default: True. 
+ include_performance (bool | Unset): Include performance insights (may be slower) Default: + False. + include_events (bool | Unset): Include recent usage events Default: False. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, GraphUsageResponse, HTTPValidationError]] + Response[ErrorResponse | GraphUsageResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -308,12 +308,12 @@ async def asyncio( graph_id: str, *, client: AuthenticatedClient, - time_range: Union[Unset, str] = "30d", - include_storage: Union[Unset, bool] = True, - include_credits: Union[Unset, bool] = True, - include_performance: Union[Unset, bool] = False, - include_events: Union[Unset, bool] = False, -) -> Optional[Union[ErrorResponse, GraphUsageResponse, HTTPValidationError]]: + time_range: str | Unset = "30d", + include_storage: bool | Unset = True, + include_credits: bool | Unset = True, + include_performance: bool | Unset = False, + include_events: bool | Unset = False, +) -> ErrorResponse | GraphUsageResponse | HTTPValidationError | None: """Get Graph Usage Analytics Get comprehensive usage analytics tracked by the GraphUsage model. @@ -348,20 +348,20 @@ async def asyncio( Args: graph_id (str): - time_range (Union[Unset, str]): Time range: 24h, 7d, 30d, current_month, last_month - Default: '30d'. - include_storage (Union[Unset, bool]): Include storage usage summary Default: True. - include_credits (Union[Unset, bool]): Include credit consumption summary Default: True. - include_performance (Union[Unset, bool]): Include performance insights (may be slower) - Default: False. - include_events (Union[Unset, bool]): Include recent usage events Default: False. + time_range (str | Unset): Time range: 24h, 7d, 30d, current_month, last_month Default: + '30d'. 
+ include_storage (bool | Unset): Include storage usage summary Default: True. + include_credits (bool | Unset): Include credit consumption summary Default: True. + include_performance (bool | Unset): Include performance insights (may be slower) Default: + False. + include_events (bool | Unset): Include recent usage events Default: False. Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, GraphUsageResponse, HTTPValidationError] + ErrorResponse | GraphUsageResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/user/create_user_api_key.py b/robosystems_client/api/user/create_user_api_key.py index 806835c..5a9c1d0 100644 --- a/robosystems_client/api/user/create_user_api_key.py +++ b/robosystems_client/api/user/create_user_api_key.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -31,8 +31,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[CreateAPIKeyResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> CreateAPIKeyResponse | HTTPValidationError | None: if response.status_code == 201: response_201 = CreateAPIKeyResponse.from_dict(response.json()) @@ -50,8 +50,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[CreateAPIKeyResponse, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[CreateAPIKeyResponse | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -64,7 +64,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: 
CreateAPIKeyRequest, -) -> Response[Union[CreateAPIKeyResponse, HTTPValidationError]]: +) -> Response[CreateAPIKeyResponse | HTTPValidationError]: """Create API Key Create a new API key for the current user. @@ -77,7 +77,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[CreateAPIKeyResponse, HTTPValidationError]] + Response[CreateAPIKeyResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -95,7 +95,7 @@ def sync( *, client: AuthenticatedClient, body: CreateAPIKeyRequest, -) -> Optional[Union[CreateAPIKeyResponse, HTTPValidationError]]: +) -> CreateAPIKeyResponse | HTTPValidationError | None: """Create API Key Create a new API key for the current user. @@ -108,7 +108,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[CreateAPIKeyResponse, HTTPValidationError] + CreateAPIKeyResponse | HTTPValidationError """ return sync_detailed( @@ -121,7 +121,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: CreateAPIKeyRequest, -) -> Response[Union[CreateAPIKeyResponse, HTTPValidationError]]: +) -> Response[CreateAPIKeyResponse | HTTPValidationError]: """Create API Key Create a new API key for the current user. @@ -134,7 +134,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[CreateAPIKeyResponse, HTTPValidationError]] + Response[CreateAPIKeyResponse | HTTPValidationError] """ kwargs = _get_kwargs( @@ -150,7 +150,7 @@ async def asyncio( *, client: AuthenticatedClient, body: CreateAPIKeyRequest, -) -> Optional[Union[CreateAPIKeyResponse, HTTPValidationError]]: +) -> CreateAPIKeyResponse | HTTPValidationError | None: """Create API Key Create a new API key for the current user. @@ -163,7 +163,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[CreateAPIKeyResponse, HTTPValidationError] + CreateAPIKeyResponse | HTTPValidationError """ return ( diff --git a/robosystems_client/api/user/get_current_user.py b/robosystems_client/api/user/get_current_user.py index a7fc919..ab49c67 100644 --- a/robosystems_client/api/user/get_current_user.py +++ b/robosystems_client/api/user/get_current_user.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -19,8 +19,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[UserResponse]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> UserResponse | None: if response.status_code == 200: response_200 = UserResponse.from_dict(response.json()) @@ -33,7 +33,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[UserResponse]: return Response( status_code=HTTPStatus(response.status_code), @@ -71,7 +71,7 @@ def sync_detailed( def sync( *, client: AuthenticatedClient, -) -> Optional[UserResponse]: +) -> UserResponse | None: """Get Current User Returns information about the currently authenticated user. @@ -115,7 +115,7 @@ async def asyncio_detailed( async def asyncio( *, client: AuthenticatedClient, -) -> Optional[UserResponse]: +) -> UserResponse | None: """Get Current User Returns information about the currently authenticated user. 
diff --git a/robosystems_client/api/user/list_user_api_keys.py b/robosystems_client/api/user/list_user_api_keys.py index f230b12..65d0239 100644 --- a/robosystems_client/api/user/list_user_api_keys.py +++ b/robosystems_client/api/user/list_user_api_keys.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -19,8 +19,8 @@ def _get_kwargs() -> dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[APIKeysResponse]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> APIKeysResponse | None: if response.status_code == 200: response_200 = APIKeysResponse.from_dict(response.json()) @@ -33,7 +33,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[APIKeysResponse]: return Response( status_code=HTTPStatus(response.status_code), @@ -71,7 +71,7 @@ def sync_detailed( def sync( *, client: AuthenticatedClient, -) -> Optional[APIKeysResponse]: +) -> APIKeysResponse | None: """List API Keys Get all API keys for the current user. @@ -115,7 +115,7 @@ async def asyncio_detailed( async def asyncio( *, client: AuthenticatedClient, -) -> Optional[APIKeysResponse]: +) -> APIKeysResponse | None: """List API Keys Get all API keys for the current user. 
diff --git a/robosystems_client/api/user/revoke_user_api_key.py b/robosystems_client/api/user/revoke_user_api_key.py index 698bf20..ebdf30d 100644 --- a/robosystems_client/api/user/revoke_user_api_key.py +++ b/robosystems_client/api/user/revoke_user_api_key.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -23,8 +23,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: if response.status_code == 200: response_200 = SuccessResponse.from_dict(response.json()) @@ -52,8 +52,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -66,7 +66,7 @@ def sync_detailed( api_key_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: """Revoke API Key Revoke (deactivate) an API key. @@ -79,7 +79,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]] + Response[ErrorResponse | HTTPValidationError | SuccessResponse] """ kwargs = _get_kwargs( @@ -97,7 +97,7 @@ def sync( api_key_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: """Revoke API Key Revoke (deactivate) an API key. @@ -110,7 +110,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, HTTPValidationError, SuccessResponse] + ErrorResponse | HTTPValidationError | SuccessResponse """ return sync_detailed( @@ -123,7 +123,7 @@ async def asyncio_detailed( api_key_id: str, *, client: AuthenticatedClient, -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: """Revoke API Key Revoke (deactivate) an API key. @@ -136,7 +136,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]] + Response[ErrorResponse | HTTPValidationError | SuccessResponse] """ kwargs = _get_kwargs( @@ -152,7 +152,7 @@ async def asyncio( api_key_id: str, *, client: AuthenticatedClient, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: """Revoke API Key Revoke (deactivate) an API key. @@ -165,7 +165,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, HTTPValidationError, SuccessResponse] + ErrorResponse | HTTPValidationError | SuccessResponse """ return ( diff --git a/robosystems_client/api/user/update_user.py b/robosystems_client/api/user/update_user.py index e5b7cad..99239da 100644 --- a/robosystems_client/api/user/update_user.py +++ b/robosystems_client/api/user/update_user.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -31,8 +31,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, UserResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | UserResponse | None: if response.status_code == 200: response_200 = UserResponse.from_dict(response.json()) @@ -50,8 +50,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, UserResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | UserResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -64,7 +64,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: UpdateUserRequest, -) -> Response[Union[HTTPValidationError, UserResponse]]: +) -> Response[HTTPValidationError | UserResponse]: """Update User Profile Update the current user's profile information. @@ -77,7 +77,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[HTTPValidationError, UserResponse]] + Response[HTTPValidationError | UserResponse] """ kwargs = _get_kwargs( @@ -95,7 +95,7 @@ def sync( *, client: AuthenticatedClient, body: UpdateUserRequest, -) -> Optional[Union[HTTPValidationError, UserResponse]]: +) -> HTTPValidationError | UserResponse | None: """Update User Profile Update the current user's profile information. @@ -108,7 +108,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, UserResponse] + HTTPValidationError | UserResponse """ return sync_detailed( @@ -121,7 +121,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: UpdateUserRequest, -) -> Response[Union[HTTPValidationError, UserResponse]]: +) -> Response[HTTPValidationError | UserResponse]: """Update User Profile Update the current user's profile information. @@ -134,7 +134,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, UserResponse]] + Response[HTTPValidationError | UserResponse] """ kwargs = _get_kwargs( @@ -150,7 +150,7 @@ async def asyncio( *, client: AuthenticatedClient, body: UpdateUserRequest, -) -> Optional[Union[HTTPValidationError, UserResponse]]: +) -> HTTPValidationError | UserResponse | None: """Update User Profile Update the current user's profile information. @@ -163,7 +163,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, UserResponse] + HTTPValidationError | UserResponse """ return ( diff --git a/robosystems_client/api/user/update_user_api_key.py b/robosystems_client/api/user/update_user_api_key.py index a9450d2..17b9e52 100644 --- a/robosystems_client/api/user/update_user_api_key.py +++ b/robosystems_client/api/user/update_user_api_key.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[APIKeyInfo, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> APIKeyInfo | HTTPValidationError | None: if response.status_code == 200: response_200 = APIKeyInfo.from_dict(response.json()) @@ -51,8 +51,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[APIKeyInfo, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[APIKeyInfo | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -66,7 +66,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: UpdateAPIKeyRequest, -) -> Response[Union[APIKeyInfo, HTTPValidationError]]: +) -> Response[APIKeyInfo | HTTPValidationError]: """Update API Key Update an API key's name or description. @@ -80,7 +80,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[APIKeyInfo, HTTPValidationError]] + Response[APIKeyInfo | HTTPValidationError] """ kwargs = _get_kwargs( @@ -100,7 +100,7 @@ def sync( *, client: AuthenticatedClient, body: UpdateAPIKeyRequest, -) -> Optional[Union[APIKeyInfo, HTTPValidationError]]: +) -> APIKeyInfo | HTTPValidationError | None: """Update API Key Update an API key's name or description. @@ -114,7 +114,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[APIKeyInfo, HTTPValidationError] + APIKeyInfo | HTTPValidationError """ return sync_detailed( @@ -129,7 +129,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: UpdateAPIKeyRequest, -) -> Response[Union[APIKeyInfo, HTTPValidationError]]: +) -> Response[APIKeyInfo | HTTPValidationError]: """Update API Key Update an API key's name or description. @@ -143,7 +143,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[APIKeyInfo, HTTPValidationError]] + Response[APIKeyInfo | HTTPValidationError] """ kwargs = _get_kwargs( @@ -161,7 +161,7 @@ async def asyncio( *, client: AuthenticatedClient, body: UpdateAPIKeyRequest, -) -> Optional[Union[APIKeyInfo, HTTPValidationError]]: +) -> APIKeyInfo | HTTPValidationError | None: """Update API Key Update an API key's name or description. @@ -175,7 +175,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[APIKeyInfo, HTTPValidationError] + APIKeyInfo | HTTPValidationError """ return ( diff --git a/robosystems_client/api/user/update_user_password.py b/robosystems_client/api/user/update_user_password.py index c39aca2..4008dee 100644 --- a/robosystems_client/api/user/update_user_password.py +++ b/robosystems_client/api/user/update_user_password.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Optional, Union +from typing import Any import httpx @@ -32,8 +32,8 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: if response.status_code == 200: response_200 = SuccessResponse.from_dict(response.json()) @@ -66,8 +66,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -80,7 +80,7 @@ def sync_detailed( *, client: AuthenticatedClient, body: UpdatePasswordRequest, -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: """Update Password Update the current user's password. @@ -93,7 +93,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]] + Response[ErrorResponse | HTTPValidationError | SuccessResponse] """ kwargs = _get_kwargs( @@ -111,7 +111,7 @@ def sync( *, client: AuthenticatedClient, body: UpdatePasswordRequest, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: """Update Password Update the current user's password. @@ -124,7 +124,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[ErrorResponse, HTTPValidationError, SuccessResponse] + ErrorResponse | HTTPValidationError | SuccessResponse """ return sync_detailed( @@ -137,7 +137,7 @@ async def asyncio_detailed( *, client: AuthenticatedClient, body: UpdatePasswordRequest, -) -> Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> Response[ErrorResponse | HTTPValidationError | SuccessResponse]: """Update Password Update the current user's password. @@ -150,7 +150,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[ErrorResponse, HTTPValidationError, SuccessResponse]] + Response[ErrorResponse | HTTPValidationError | SuccessResponse] """ kwargs = _get_kwargs( @@ -166,7 +166,7 @@ async def asyncio( *, client: AuthenticatedClient, body: UpdatePasswordRequest, -) -> Optional[Union[ErrorResponse, HTTPValidationError, SuccessResponse]]: +) -> ErrorResponse | HTTPValidationError | SuccessResponse | None: """Update Password Update the current user's password. @@ -179,7 +179,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[ErrorResponse, HTTPValidationError, SuccessResponse] + ErrorResponse | HTTPValidationError | SuccessResponse """ return ( diff --git a/robosystems_client/api/views/__init__.py b/robosystems_client/api/views/__init__.py new file mode 100644 index 0000000..2d7c0b2 --- /dev/null +++ b/robosystems_client/api/views/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/robosystems_client/api/views/create_view.py b/robosystems_client/api/views/create_view.py new file mode 100644 index 0000000..7f13fdb --- /dev/null +++ b/robosystems_client/api/views/create_view.py @@ -0,0 +1,245 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.create_view_request import CreateViewRequest +from ...models.http_validation_error import HTTPValidationError +from ...types import Response + + +def _get_kwargs( + graph_id: str, + *, + body: CreateViewRequest, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": f"/v1/graphs/{graph_id}/views", + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | None: + if response.status_code == 200: + response_200 = response.json() + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError]: + return Response( + status_code=HTTPStatus(response.status_code), + 
content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: CreateViewRequest, +) -> Response[Any | HTTPValidationError]: + """Create View + + Generate financial report view from data source (dual-mode support). + + **Mode 1: Transaction Aggregation (generate_from_transactions)** + - Aggregates raw transaction data to trial balance + - Creates facts on-demand + - Shows real-time reporting from source of truth + + **Mode 2: Existing Facts (pivot_existing_facts)** + - Queries existing Fact nodes + - Supports multi-dimensional analysis + - Works with SEC filings and pre-computed facts + + Both modes: + - Build FactGrid from data + - Generate pivot table presentation + - Return consistent response format + + Args: + graph_id (str): + body (CreateViewRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + body: CreateViewRequest, +) -> Any | HTTPValidationError | None: + """Create View + + Generate financial report view from data source (dual-mode support). 
+ + **Mode 1: Transaction Aggregation (generate_from_transactions)** + - Aggregates raw transaction data to trial balance + - Creates facts on-demand + - Shows real-time reporting from source of truth + + **Mode 2: Existing Facts (pivot_existing_facts)** + - Queries existing Fact nodes + - Supports multi-dimensional analysis + - Works with SEC filings and pre-computed facts + + Both modes: + - Build FactGrid from data + - Generate pivot table presentation + - Return consistent response format + + Args: + graph_id (str): + body (CreateViewRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | HTTPValidationError + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: CreateViewRequest, +) -> Response[Any | HTTPValidationError]: + """Create View + + Generate financial report view from data source (dual-mode support). + + **Mode 1: Transaction Aggregation (generate_from_transactions)** + - Aggregates raw transaction data to trial balance + - Creates facts on-demand + - Shows real-time reporting from source of truth + + **Mode 2: Existing Facts (pivot_existing_facts)** + - Queries existing Fact nodes + - Supports multi-dimensional analysis + - Works with SEC filings and pre-computed facts + + Both modes: + - Build FactGrid from data + - Generate pivot table presentation + - Return consistent response format + + Args: + graph_id (str): + body (CreateViewRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Any | HTTPValidationError] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + body: CreateViewRequest, +) -> Any | HTTPValidationError | None: + """Create View + + Generate financial report view from data source (dual-mode support). + + **Mode 1: Transaction Aggregation (generate_from_transactions)** + - Aggregates raw transaction data to trial balance + - Creates facts on-demand + - Shows real-time reporting from source of truth + + **Mode 2: Existing Facts (pivot_existing_facts)** + - Queries existing Fact nodes + - Supports multi-dimensional analysis + - Works with SEC filings and pre-computed facts + + Both modes: + - Build FactGrid from data + - Generate pivot table presentation + - Return consistent response format + + Args: + graph_id (str): + body (CreateViewRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Any | HTTPValidationError + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + body=body, + ) + ).parsed diff --git a/robosystems_client/api/views/save_view.py b/robosystems_client/api/views/save_view.py new file mode 100644 index 0000000..6a626d4 --- /dev/null +++ b/robosystems_client/api/views/save_view.py @@ -0,0 +1,299 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.http_validation_error import HTTPValidationError +from ...models.save_view_request import SaveViewRequest +from ...models.save_view_response import SaveViewResponse +from ...types import Response + + +def _get_kwargs( + graph_id: str, + *, + body: SaveViewRequest, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": f"/v1/graphs/{graph_id}/views/save", + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | SaveViewResponse | None: + if response.status_code == 200: + response_200 = SaveViewResponse.from_dict(response.json()) + + return response_200 + + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | SaveViewResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: SaveViewRequest, +) -> Response[HTTPValidationError | SaveViewResponse]: + """Save View + + Save or update view as materialized report in the graph. + + Converts computed view results into persistent Report, Fact, and Structure nodes. + This establishes what data exists in the subgraph, which then defines what + needs to be exported for publishing to the parent graph. 
+ + **Create Mode** (no report_id provided): + - Generates new report_id from entity + period + report type + - Creates new Report, Facts, and Structures + + **Update Mode** (report_id provided): + - Deletes all existing Facts and Structures for the report + - Updates Report metadata + - Creates fresh Facts and Structures from current view + - Useful for refreshing reports with updated data or view configurations + + **This is NOT publishing** - it only creates nodes in the subgraph workspace. + Publishing (export → parquet → parent ingest) happens separately. + + Creates/Updates: + - Report node with metadata + - Fact nodes with all aspects (period, entity, element, unit) + - PresentationStructure nodes (how facts are displayed) + - CalculationStructure nodes (how facts roll up) + + Returns: + - report_id: Unique identifier used as parquet export prefix + - parquet_export_prefix: Filename prefix for future exports + - All created facts and structures + + Args: + graph_id (str): + body (SaveViewRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[HTTPValidationError | SaveViewResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + graph_id: str, + *, + client: AuthenticatedClient, + body: SaveViewRequest, +) -> HTTPValidationError | SaveViewResponse | None: + """Save View + + Save or update view as materialized report in the graph. + + Converts computed view results into persistent Report, Fact, and Structure nodes. + This establishes what data exists in the subgraph, which then defines what + needs to be exported for publishing to the parent graph. 
+ + **Create Mode** (no report_id provided): + - Generates new report_id from entity + period + report type + - Creates new Report, Facts, and Structures + + **Update Mode** (report_id provided): + - Deletes all existing Facts and Structures for the report + - Updates Report metadata + - Creates fresh Facts and Structures from current view + - Useful for refreshing reports with updated data or view configurations + + **This is NOT publishing** - it only creates nodes in the subgraph workspace. + Publishing (export → parquet → parent ingest) happens separately. + + Creates/Updates: + - Report node with metadata + - Fact nodes with all aspects (period, entity, element, unit) + - PresentationStructure nodes (how facts are displayed) + - CalculationStructure nodes (how facts roll up) + + Returns: + - report_id: Unique identifier used as parquet export prefix + - parquet_export_prefix: Filename prefix for future exports + - All created facts and structures + + Args: + graph_id (str): + body (SaveViewRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + HTTPValidationError | SaveViewResponse + """ + + return sync_detailed( + graph_id=graph_id, + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + graph_id: str, + *, + client: AuthenticatedClient, + body: SaveViewRequest, +) -> Response[HTTPValidationError | SaveViewResponse]: + """Save View + + Save or update view as materialized report in the graph. + + Converts computed view results into persistent Report, Fact, and Structure nodes. + This establishes what data exists in the subgraph, which then defines what + needs to be exported for publishing to the parent graph. 
+ + **Create Mode** (no report_id provided): + - Generates new report_id from entity + period + report type + - Creates new Report, Facts, and Structures + + **Update Mode** (report_id provided): + - Deletes all existing Facts and Structures for the report + - Updates Report metadata + - Creates fresh Facts and Structures from current view + - Useful for refreshing reports with updated data or view configurations + + **This is NOT publishing** - it only creates nodes in the subgraph workspace. + Publishing (export → parquet → parent ingest) happens separately. + + Creates/Updates: + - Report node with metadata + - Fact nodes with all aspects (period, entity, element, unit) + - PresentationStructure nodes (how facts are displayed) + - CalculationStructure nodes (how facts roll up) + + Returns: + - report_id: Unique identifier used as parquet export prefix + - parquet_export_prefix: Filename prefix for future exports + - All created facts and structures + + Args: + graph_id (str): + body (SaveViewRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[HTTPValidationError | SaveViewResponse] + """ + + kwargs = _get_kwargs( + graph_id=graph_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + graph_id: str, + *, + client: AuthenticatedClient, + body: SaveViewRequest, +) -> HTTPValidationError | SaveViewResponse | None: + """Save View + + Save or update view as materialized report in the graph. + + Converts computed view results into persistent Report, Fact, and Structure nodes. + This establishes what data exists in the subgraph, which then defines what + needs to be exported for publishing to the parent graph. 
+ + **Create Mode** (no report_id provided): + - Generates new report_id from entity + period + report type + - Creates new Report, Facts, and Structures + + **Update Mode** (report_id provided): + - Deletes all existing Facts and Structures for the report + - Updates Report metadata + - Creates fresh Facts and Structures from current view + - Useful for refreshing reports with updated data or view configurations + + **This is NOT publishing** - it only creates nodes in the subgraph workspace. + Publishing (export → parquet → parent ingest) happens separately. + + Creates/Updates: + - Report node with metadata + - Fact nodes with all aspects (period, entity, element, unit) + - PresentationStructure nodes (how facts are displayed) + - CalculationStructure nodes (how facts roll up) + + Returns: + - report_id: Unique identifier used as parquet export prefix + - parquet_export_prefix: Filename prefix for future exports + - All created facts and structures + + Args: + graph_id (str): + body (SaveViewRequest): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + HTTPValidationError | SaveViewResponse + """ + + return ( + await asyncio_detailed( + graph_id=graph_id, + client=client, + body=body, + ) + ).parsed diff --git a/robosystems_client/client.py b/robosystems_client/client.py index d482fc2..06384dc 100644 --- a/robosystems_client/client.py +++ b/robosystems_client/client.py @@ -1,5 +1,5 @@ import ssl -from typing import Any, Optional, Union +from typing import Any import httpx from attrs import define, evolve, field @@ -38,14 +38,14 @@ class Client: _base_url: str = field(alias="base_url") _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True, alias="timeout") - _verify_ssl: Union[str, bool, ssl.SSLContext] = field( + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field( default=True, kw_only=True, alias="verify_ssl" ) _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) def with_headers(self, headers: dict[str, str]) -> "Client": """Get a new client matching this one with additional headers""" @@ -64,7 +64,7 @@ def with_cookies(self, cookies: dict[str, str]) -> "Client": return evolve(self, cookies={**self._cookies, **cookies}) def with_timeout(self, timeout: httpx.Timeout) -> "Client": - """Get a new client matching this one with a new timeout (in seconds)""" + """Get a new client matching this one with a new timeout configuration""" if self._client is 
not None: self._client.timeout = timeout if self._async_client is not None: @@ -103,7 +103,7 @@ def __exit__(self, *args: Any, **kwargs: Any) -> None: self.get_httpx_client().__exit__(*args, **kwargs) def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "Client": - """Manually the underlying httpx.AsyncClient + """Manually set the underlying httpx.AsyncClient **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. """ @@ -170,14 +170,14 @@ class AuthenticatedClient: _base_url: str = field(alias="base_url") _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True, alias="timeout") - _verify_ssl: Union[str, bool, ssl.SSLContext] = field( + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field( default=True, kw_only=True, alias="verify_ssl" ) _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) token: str prefix: str = "Bearer" @@ -200,7 +200,7 @@ def with_cookies(self, cookies: dict[str, str]) -> "AuthenticatedClient": return evolve(self, cookies={**self._cookies, **cookies}) def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": - """Get a new client matching this one with a new timeout (in seconds)""" + """Get a new client matching this one with a new timeout configuration""" if self._client is not None: self._client.timeout = timeout if 
self._async_client is not None: @@ -244,7 +244,7 @@ def __exit__(self, *args: Any, **kwargs: Any) -> None: def set_async_httpx_client( self, async_client: httpx.AsyncClient ) -> "AuthenticatedClient": - """Manually the underlying httpx.AsyncClient + """Manually set the underlying httpx.AsyncClient **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. """ diff --git a/robosystems_client/extensions/__init__.py b/robosystems_client/extensions/__init__.py index d65187e..edf3281 100644 --- a/robosystems_client/extensions/__init__.py +++ b/robosystems_client/extensions/__init__.py @@ -46,6 +46,27 @@ RoboSystemsExtensionConfig, AsyncRoboSystemsExtensions, ) +from .element_mapping_client import ( + ElementMappingClient, + MappingStructure, + ElementAssociation, + AggregationMethod, +) +from .subgraph_workspace_client import ( + SubgraphWorkspaceClient, + SubgraphWorkspace, + StorageType, + ExportResult, + PublishResult, +) +from .view_builder_client import ( + ViewBuilderClient, + ViewSourceType, + ViewSource, + ViewAxis, + ViewConfig, + ViewResponse, +) from .utils import ( QueryBuilder, ResultProcessor, @@ -114,6 +135,24 @@ "RoboSystemsExtensions", "RoboSystemsExtensionConfig", "AsyncRoboSystemsExtensions", + # Element Mapping Client + "ElementMappingClient", + "MappingStructure", + "ElementAssociation", + "AggregationMethod", + # Subgraph Workspace Client + "SubgraphWorkspaceClient", + "SubgraphWorkspace", + "StorageType", + "ExportResult", + "PublishResult", + # View Builder Client + "ViewBuilderClient", + "ViewSourceType", + "ViewSource", + "ViewAxis", + "ViewConfig", + "ViewResponse", # SSE Client "SSEClient", "EventType", diff --git a/robosystems_client/extensions/element_mapping_client.py b/robosystems_client/extensions/element_mapping_client.py new file mode 100644 index 0000000..86f6d23 --- /dev/null +++ b/robosystems_client/extensions/element_mapping_client.py @@ -0,0 +1,585 @@ +"""Element Mapping Client 
Extension + +Client-side extension for managing element mappings in subgraph workspaces. +Constructs Cypher queries for execution via the public /query endpoint. + +This replaces server-side mapping endpoints with client-side logic, +following the architecture where mappings are written to subgraphs +and later published to the main graph via parquet export/ingest. +""" + +import uuid +from typing import List, Optional +from dataclasses import dataclass +from enum import Enum +import pandas as pd + + +class AggregationMethod(Enum): + """Aggregation methods for element mapping""" + + SUM = "sum" + AVERAGE = "average" + WEIGHTED_AVERAGE = "weighted_average" + FIRST = "first" + LAST = "last" + CALCULATED = "calculated" + + +@dataclass +class MappingStructure: + """Element mapping structure""" + + identifier: str + name: str + description: Optional[str] = None + taxonomy_uri: Optional[str] = None + target_taxonomy_uri: Optional[str] = None + associations: Optional[List["ElementAssociation"]] = None + + def __post_init__(self): + if self.associations is None: + self.associations = [] + + +@dataclass +class ElementAssociation: + """Association between source and target elements""" + + identifier: str + source_element: str + target_element: str + aggregation_method: AggregationMethod = AggregationMethod.SUM + weight: float = 1.0 + formula: Optional[str] = None + order_value: float = 1.0 + + +class ElementMappingClient: + """ + Client for managing element mappings in subgraph workspaces. + + All operations construct Cypher queries that are executed via the + public /query endpoint against a subgraph workspace. + """ + + def __init__(self, query_client): + """ + Initialize with a query client for executing Cypher. 
+ + Args: + query_client: RoboSystemsExtensions query client + """ + self.query = query_client + + def _generate_uuid(self, seed: str = None) -> str: + """Generate a deterministic or random UUID""" + if seed: + # For deterministic UUID based on seed + import hashlib + + hash_obj = hashlib.sha256(seed.encode()) + hash_hex = hash_obj.hexdigest() + return f"{hash_hex[:8]}-{hash_hex[8:12]}-7{hash_hex[13:16]}-{hash_hex[16:20]}-{hash_hex[20:32]}" + else: + return str(uuid.uuid4()) + + async def create_mapping_structure( + self, + graph_id: str, + name: str, + description: str = None, + taxonomy_uri: str = None, + target_taxonomy_uri: str = None, + ) -> MappingStructure: + """ + Create a new element mapping structure in the subgraph. + + Args: + graph_id: Subgraph workspace ID (e.g., kg123_workspace) + name: Name of the mapping structure + description: Optional description + taxonomy_uri: Source taxonomy URI (e.g., "qb:chart-of-accounts") + target_taxonomy_uri: Target taxonomy URI (e.g., "us-gaap:2024") + + Returns: + Created MappingStructure + """ + structure_id = self._generate_uuid(f"mapping_structure_{name}_{graph_id}") + + cypher = """ + CREATE (s:Structure { + identifier: $identifier, + type: 'ElementMapping', + name: $name, + definition: $description, + uri: $uri, + network_uri: $network_uri + }) + RETURN s + """ + + params = { + "identifier": structure_id, + "name": name, + "description": description or "", + "uri": taxonomy_uri or "", + "network_uri": target_taxonomy_uri or "", + } + + self.query.query(graph_id, cypher, params) + + return MappingStructure( + identifier=structure_id, + name=name, + description=description, + taxonomy_uri=taxonomy_uri, + target_taxonomy_uri=target_taxonomy_uri, + associations=[], + ) + + async def get_mapping_structure( + self, graph_id: str, structure_id: str + ) -> Optional[MappingStructure]: + """ + Get a mapping structure with all its associations. 
+ + Args: + graph_id: Subgraph workspace ID + structure_id: Structure identifier + + Returns: + MappingStructure with associations, or None if not found + """ + cypher = """ + MATCH (s:Structure) + WHERE s.identifier = $structure_id AND s.type = 'ElementMapping' + OPTIONAL MATCH (s)-[:STRUCTURE_HAS_ASSOCIATION]->(a:Association) + OPTIONAL MATCH (a)-[:ASSOCIATION_HAS_FROM_ELEMENT]->(from_el:Element) + OPTIONAL MATCH (a)-[:ASSOCIATION_HAS_TO_ELEMENT]->(to_el:Element) + RETURN s, + collect({ + identifier: a.identifier, + source_element: from_el.uri, + target_element: to_el.uri, + aggregation_method: a.preferred_label, + weight: a.weight, + order_value: a.order_value + }) as associations + """ + + result = self.query.query(graph_id, cypher, {"structure_id": structure_id}) + + if not result or not result.data: + return None + + row = result.data[0] + structure_data = row["s"] + + associations = [] + for assoc in row["associations"]: + if assoc["identifier"]: + associations.append( + ElementAssociation( + identifier=assoc["identifier"], + source_element=assoc["source_element"] or "", + target_element=assoc["target_element"] or "", + aggregation_method=AggregationMethod(assoc["aggregation_method"] or "sum"), + weight=assoc["weight"] or 1.0, + order_value=assoc["order_value"] or 1.0, + ) + ) + + return MappingStructure( + identifier=structure_data["identifier"], + name=structure_data["name"], + description=structure_data.get("definition"), + taxonomy_uri=structure_data.get("uri"), + target_taxonomy_uri=structure_data.get("network_uri"), + associations=associations, + ) + + async def list_mapping_structures(self, graph_id: str) -> List[MappingStructure]: + """ + List all mapping structures in the subgraph. 
+ + Args: + graph_id: Subgraph workspace ID + + Returns: + List of MappingStructure objects + """ + cypher = """ + MATCH (s:Structure) + WHERE s.type = 'ElementMapping' + OPTIONAL MATCH (s)-[:STRUCTURE_HAS_ASSOCIATION]->(a:Association) + RETURN s, + count(a) as association_count + ORDER BY s.name + """ + + result = self.query.query(graph_id, cypher, {}) + + structures = [] + if result and result.data: + for row in result.data: + structure_data = row["s"] + structures.append( + MappingStructure( + identifier=structure_data["identifier"], + name=structure_data["name"], + description=structure_data.get("definition"), + taxonomy_uri=structure_data.get("uri"), + target_taxonomy_uri=structure_data.get("network_uri"), + associations=[], # Not loading associations in list view + ) + ) + + return structures + + async def create_association( + self, + graph_id: str, + structure_id: str, + source_element: str, + target_element: str, + aggregation_method: AggregationMethod = AggregationMethod.SUM, + weight: float = 1.0, + order_value: float = 1.0, + ) -> ElementAssociation: + """ + Add an association to a mapping structure. + + Creates an Association node linking source element to target element. + If the target element doesn't exist, it will be created. 
+ + Args: + graph_id: Subgraph workspace ID + structure_id: Structure identifier + source_element: Source element URI (e.g., "qb:BankAccount1") + target_element: Target element URI (e.g., "us-gaap:Cash") + aggregation_method: How to aggregate values + weight: Weight for weighted aggregation + order_value: Display order + + Returns: + Created ElementAssociation + """ + association_id = self._generate_uuid( + f"association_{structure_id}_{source_element}_{target_element}" + ) + + # Generate identifier for target element + target_element_id = self._generate_uuid(f"element_{target_element}") + target_element_name = ( + target_element.split(":")[-1] if ":" in target_element else target_element + ) + + cypher = """ + MATCH (s:Structure) + WHERE s.identifier = $structure_id AND s.type = 'ElementMapping' + MATCH (from_el:Element {uri: $source_element}) + MERGE (to_el:Element {identifier: $target_element_identifier}) + ON CREATE SET to_el.uri = $target_element, to_el.name = $target_element_name + CREATE (a:Association { + identifier: $identifier, + association_type: 'ElementMapping', + arcrole: 'aggregation', + preferred_label: $preferred_label, + weight: $weight, + order_value: $order_value + }) + CREATE (s)-[:STRUCTURE_HAS_ASSOCIATION]->(a) + CREATE (a)-[:ASSOCIATION_HAS_FROM_ELEMENT]->(from_el) + CREATE (a)-[:ASSOCIATION_HAS_TO_ELEMENT]->(to_el) + RETURN a + """ + + params = { + "structure_id": structure_id, + "identifier": association_id, + "preferred_label": aggregation_method.value, + "weight": weight, + "order_value": order_value, + "source_element": source_element, + "target_element": target_element, + "target_element_identifier": target_element_id, + "target_element_name": target_element_name, + } + + self.query.query(graph_id, cypher, params) + + return ElementAssociation( + identifier=association_id, + source_element=source_element, + target_element=target_element, + aggregation_method=aggregation_method, + weight=weight, + order_value=order_value, + ) + + async 
def update_association( + self, + graph_id: str, + structure_id: str, + association_id: str, + aggregation_method: AggregationMethod = None, + weight: float = None, + order_value: float = None, + ) -> Optional[ElementAssociation]: + """ + Update an existing association. + + Args: + graph_id: Subgraph workspace ID + structure_id: Structure identifier + association_id: Association identifier + aggregation_method: New aggregation method + weight: New weight + order_value: New order value + + Returns: + Updated ElementAssociation, or None if not found + """ + set_clauses = [] + params = {"structure_id": structure_id, "association_id": association_id} + + if aggregation_method is not None: + set_clauses.append("a.preferred_label = $aggregation_method") + params["aggregation_method"] = aggregation_method.value + + if weight is not None: + set_clauses.append("a.weight = $weight") + params["weight"] = weight + + if order_value is not None: + set_clauses.append("a.order_value = $order_value") + params["order_value"] = order_value + + if not set_clauses: + return None + + update_cypher = f""" + MATCH (s:Structure)-[:STRUCTURE_HAS_ASSOCIATION]->(a:Association) + WHERE s.identifier = $structure_id AND a.identifier = $association_id + SET {", ".join(set_clauses)} + RETURN a + """ + + self.query.query(graph_id, update_cypher, params) + + # Get updated association + get_cypher = """ + MATCH (s:Structure)-[:STRUCTURE_HAS_ASSOCIATION]->(a:Association) + WHERE s.identifier = $structure_id AND a.identifier = $association_id + MATCH (a)-[:ASSOCIATION_HAS_FROM_ELEMENT]->(from_el:Element) + MATCH (a)-[:ASSOCIATION_HAS_TO_ELEMENT]->(to_el:Element) + RETURN a, from_el.uri as source_element, to_el.uri as target_element + """ + + result = self.query.query(graph_id, get_cypher, params) + + if not result or not result.data: + return None + + row = result.data[0] + assoc_data = row["a"] + + return ElementAssociation( + identifier=assoc_data["identifier"], + 
source_element=row["source_element"], + target_element=row["target_element"], + aggregation_method=AggregationMethod(assoc_data["preferred_label"]), + weight=assoc_data["weight"], + order_value=assoc_data["order_value"], + ) + + async def delete_association( + self, graph_id: str, structure_id: str, association_id: str + ) -> bool: + """ + Delete an association from a mapping structure. + + Args: + graph_id: Subgraph workspace ID + structure_id: Structure identifier + association_id: Association identifier + + Returns: + True if deleted + """ + cypher = """ + MATCH (s:Structure)-[:STRUCTURE_HAS_ASSOCIATION]->(a:Association) + WHERE s.identifier = $structure_id AND a.identifier = $association_id + DETACH DELETE a + """ + + self.query.query( + graph_id, cypher, {"structure_id": structure_id, "association_id": association_id} + ) + + return True + + async def delete_mapping_structure(self, graph_id: str, structure_id: str) -> bool: + """ + Delete a mapping structure and all its associations. + + Args: + graph_id: Subgraph workspace ID + structure_id: Structure identifier + + Returns: + True if deleted + """ + cypher = """ + MATCH (s:Structure) + WHERE s.identifier = $structure_id AND s.type = 'ElementMapping' + OPTIONAL MATCH (s)-[:STRUCTURE_HAS_ASSOCIATION]->(a:Association) + DETACH DELETE s, a + """ + + self.query.query(graph_id, cypher, {"structure_id": structure_id}) + + return True + + @staticmethod + def apply_element_mapping( + fact_data: pd.DataFrame, mapping_structure: MappingStructure + ) -> pd.DataFrame: + """ + Apply element mapping to aggregate source elements into target elements. + + This is a client-side pandas operation that doesn't require graph access. + + Args: + fact_data: DataFrame with columns including element_id, numeric_value, etc. 
+ mapping_structure: MappingStructure with associations defining aggregation + + Returns: + DataFrame with aggregated facts mapped to target elements + """ + if fact_data.empty or not mapping_structure.associations: + return fact_data + + df = fact_data.copy() + aggregated_rows = [] + + # Handle both numeric_value (from facts) and net_balance (from trial balance) + value_col = "numeric_value" if "numeric_value" in df.columns else "net_balance" + + # Group associations by target element + target_groups = {} + for assoc in mapping_structure.associations: + if assoc.target_element not in target_groups: + target_groups[assoc.target_element] = [] + target_groups[assoc.target_element].append(assoc) + + # Build URI to ID mapping if both columns exist + uri_to_id_map = {} + if "element_uri" in df.columns and "element_id" in df.columns: + for _, row in df[["element_uri", "element_id"]].drop_duplicates().iterrows(): + uri_to_id_map[row["element_uri"]] = row["element_id"] + + # Determine groupby columns + groupby_columns = [] + if "period_end" in df.columns: + groupby_columns.append("period_end") + if "period_start" in df.columns: + groupby_columns.append("period_start") + if "entity_id" in df.columns: + groupby_columns.append("entity_id") + if "dimension_axis" in df.columns: + groupby_columns.append("dimension_axis") + if "dimension_member" in df.columns: + groupby_columns.append("dimension_member") + + # Aggregate for each target element + for target_element, associations in target_groups.items(): + # Map source URIs to IDs + source_element_uris = [assoc.source_element for assoc in associations] + source_element_ids = [uri_to_id_map.get(uri, uri) for uri in source_element_uris] + + # Filter source facts + source_facts = df[df["element_id"].isin(source_element_ids)].copy() + + if source_facts.empty: + continue + + aggregation_method = associations[0].aggregation_method + + if groupby_columns: + # Group and aggregate + for group_keys, group_df in 
source_facts.groupby(groupby_columns): + aggregated_value = ElementMappingClient._aggregate_values( + group_df, associations, aggregation_method, value_col + ) + + # Create aggregated row + aggregated_row = group_df.iloc[0].copy() + aggregated_row["element_id"] = target_element + aggregated_row["element_name"] = target_element.split(":")[-1] + aggregated_row[value_col] = aggregated_value + + if "element_label" in aggregated_row: + aggregated_row["element_label"] = target_element.split(":")[-1] + + aggregated_rows.append(aggregated_row) + else: + # No grouping, aggregate all + aggregated_value = ElementMappingClient._aggregate_values( + source_facts, associations, aggregation_method, value_col + ) + + aggregated_row = source_facts.iloc[0].copy() + aggregated_row["element_id"] = target_element + aggregated_row["element_name"] = target_element.split(":")[-1] + aggregated_row[value_col] = aggregated_value + + if "element_label" in aggregated_row: + aggregated_row["element_label"] = target_element.split(":")[-1] + + aggregated_rows.append(aggregated_row) + + if not aggregated_rows: + return df + + return pd.DataFrame(aggregated_rows) + + @staticmethod + def _aggregate_values( + facts: pd.DataFrame, + associations: List[ElementAssociation], + method: AggregationMethod, + value_col: str, + ) -> float: + """Helper function to aggregate values based on method.""" + if method == AggregationMethod.SUM: + return facts[value_col].sum() + + elif method == AggregationMethod.AVERAGE: + return facts[value_col].mean() + + elif method == AggregationMethod.WEIGHTED_AVERAGE: + weights_map = {assoc.source_element: assoc.weight for assoc in associations} + facts_with_weights = facts.copy() + facts_with_weights["weight"] = facts_with_weights["element_id"].map(weights_map) + facts_with_weights["weighted_value"] = ( + facts_with_weights[value_col] * facts_with_weights["weight"] + ) + total_weight = facts_with_weights["weight"].sum() + if total_weight == 0: + return 0.0 + return 
facts_with_weights["weighted_value"].sum() / total_weight + + elif method == AggregationMethod.FIRST: + return facts[value_col].iloc[0] + + elif method == AggregationMethod.LAST: + return facts[value_col].iloc[-1] + + elif method == AggregationMethod.CALCULATED: + # For calculated, use sum as default (could be customized) + return facts[value_col].sum() + + # Default to sum + return facts[value_col].sum() diff --git a/robosystems_client/extensions/subgraph_workspace_client.py b/robosystems_client/extensions/subgraph_workspace_client.py new file mode 100644 index 0000000..34b080c --- /dev/null +++ b/robosystems_client/extensions/subgraph_workspace_client.py @@ -0,0 +1,744 @@ +"""Subgraph Workspace Client Extension + +Client for managing subgraph workspaces following the Financial Report Creator architecture. +Supports creating isolated workspaces, transferring data, and publishing to main graph. +""" + +from typing import List, Optional, Dict, Any +from dataclasses import dataclass +from enum import Enum +import json +import httpx + + +class StorageType(Enum): + """Subgraph storage types""" + + IN_MEMORY = "in_memory" + DISK_BASED = "disk_based" + + +@dataclass +class SubgraphWorkspace: + """Represents a subgraph workspace""" + + graph_id: str # Full subgraph ID (e.g., kg123_workspace) + parent_id: str # Parent graph ID (e.g., kg123) + name: str # Workspace name (e.g., workspace) + display_name: str + storage_type: StorageType + created_at: str + status: str = "active" + fork_status: Optional[Dict[str, Any]] = None # Fork operation status if forked + + +@dataclass +class ExportResult: + """Result from exporting subgraph to parquet""" + + shared_filename: str + files_exported: List[Dict[str, Any]] + total_rows: int + execution_time_ms: int + + +@dataclass +class PublishResult: + """Result from publishing subgraph to main graph""" + + nodes_created: int + relationships_created: int + execution_time_ms: int + success: bool + + +class SubgraphWorkspaceClient: + """ + 
Client for managing subgraph workspaces. + + Provides functionality to: + - Create/delete subgraph workspaces + - Transfer data from main graph to subgraph + - Export subgraph to parquet files + - Publish subgraph to main graph via incremental ingestion + """ + + def __init__(self, api_client, query_client=None): + """ + Initialize with API client and optional query client. + + Args: + api_client: RoboSystems API client for subgraph operations + query_client: Optional query client for executing Cypher + """ + self.api = api_client + self.query = query_client + + async def create_workspace( + self, + parent_graph_id: str, + name: str, + display_name: str = None, + storage_type: StorageType = StorageType.IN_MEMORY, + schema_extensions: List[str] = None, + fork_parent: bool = False, + ) -> SubgraphWorkspace: + """ + Create a new subgraph workspace. + + Args: + parent_graph_id: Parent graph ID (e.g., kg123) + name: Workspace name (alphanumeric, 1-20 chars) + display_name: Optional human-readable name + storage_type: IN_MEMORY (fast, <10K nodes) or DISK_BASED (persistent, >100K nodes) + schema_extensions: List of schema extensions (e.g., ["roboledger"]) + fork_parent: If True, copy all data from parent graph (creates a "fork") + + Returns: + SubgraphWorkspace object + """ + # Construct request for subgraph creation + request_body = { + "name": name, + "display_name": display_name or f"Workspace {name}", + "storage_type": storage_type.value, + "schema_extensions": schema_extensions or ["roboledger"], + "fork_parent": fork_parent, # Pass fork flag to server + } + + # Use httpx to call API + async with httpx.AsyncClient() as client: + headers = {"X-API-Key": self.api.token, "Content-Type": "application/json"} + response = await client.post( + f"{self.api._base_url}/v1/graphs/{parent_graph_id}/subgraphs", + json=request_body, + headers=headers, + ) + result = response.json() + + # Construct full subgraph ID + subgraph_id = f"{parent_graph_id}_{name}" + + workspace = 
SubgraphWorkspace( + graph_id=subgraph_id, + parent_id=parent_graph_id, + name=name, + display_name=result.get("display_name", display_name), + storage_type=storage_type, + created_at=result.get("created_at"), + status="active", + ) + + # If fork_parent is True, trigger server-side fork + # The server will handle the fork operation with progress monitoring + if fork_parent: + # Fork happens server-side during creation when fork_parent=True + # For client-side monitoring, use fork_from_parent_with_sse() method + pass + + return workspace + + async def create_workspace_with_fork( + self, + parent_graph_id: str, + name: str, + display_name: str = None, + fork_parent: bool = True, + fork_options: Dict[str, Any] = None, + progress_callback: Optional[callable] = None, + ) -> SubgraphWorkspace: + """ + Create a subgraph workspace with fork from parent using SSE monitoring. + + This method creates a subgraph and monitors the fork operation via SSE + if fork_parent=True. The fork operation copies data from the parent + graph to the new subgraph. 
+ + Args: + parent_graph_id: Parent graph ID + name: Workspace name (alphanumeric only, 1-20 chars) + display_name: Human-readable display name + fork_parent: If True, fork data from parent graph + fork_options: Fork options dict: + - tables: List of tables to copy or "all" (default: "all") + - exclude_patterns: List of table patterns to exclude (e.g., ["Report*"]) + progress_callback: Optional callback(msg: str, pct: float) for progress updates + + Returns: + SubgraphWorkspace with fork_status if fork was performed + """ + # Create request body + request_body = { + "name": name, + "display_name": display_name or f"Workspace {name}", + "fork_parent": fork_parent, + "metadata": {"fork_options": fork_options} if fork_options else None, + } + + # Use httpx directly to call API + async with httpx.AsyncClient() as client: + headers = {"X-API-Key": self.api.token, "Content-Type": "application/json"} + + # Call API to create subgraph with fork + response = await client.post( + f"{self.api._base_url}/v1/graphs/{parent_graph_id}/subgraphs", + json=request_body, + headers=headers, + ) + result = response.json() + + # If fork_parent=True, response includes operation_id for SSE monitoring + if fork_parent and "operation_id" in result: + operation_id = result["operation_id"] + + # Monitor fork progress via SSE + if progress_callback: + # Connect to SSE endpoint + sse_url = f"{self.api._base_url}/v1/operations/{operation_id}/stream" + headers = {"X-API-Key": self.api.token} + + async with httpx.AsyncClient() as client: + async with client.stream("GET", sse_url, headers=headers) as sse_response: + async for line in sse_response.aiter_lines(): + if line.startswith("data: "): + try: + event_data = json.loads(line[6:]) + if event_data.get("event") == "operation_progress": + msg = event_data.get("data", {}).get("message", "") + pct = event_data.get("data", {}).get("progress_percent", 0) + if progress_callback: + await progress_callback(msg, pct) + elif event_data.get("event") == 
"operation_completed": + if progress_callback: + await progress_callback("Fork completed", 100) + break + elif event_data.get("event") == "operation_error": + error = event_data.get("data", {}).get("error", "Unknown error") + if progress_callback: + await progress_callback(f"Fork failed: {error}", 0) + break + except json.JSONDecodeError: + continue + + # Get final status via API + async with httpx.AsyncClient() as client: + headers = {"X-API-Key": self.api.token} + status_response = await client.get( + f"{self.api._base_url}/v1/operations/{operation_id}/status", headers=headers + ) + final_status = status_response.json() + + # Construct full subgraph ID + subgraph_id = f"{parent_graph_id}_{name}" + + workspace = SubgraphWorkspace( + graph_id=subgraph_id, + parent_id=parent_graph_id, + name=name, + display_name=display_name or f"Workspace {name}", + storage_type=StorageType.IN_MEMORY, + created_at=final_status.get("created_at"), + status="active", + fork_status=final_status.get("result", {}).get("fork_status"), + ) + + return workspace + + # Non-fork path (immediate response) + subgraph_id = f"{parent_graph_id}_{name}" + + return SubgraphWorkspace( + graph_id=subgraph_id, + parent_id=parent_graph_id, + name=name, + display_name=result.get("display_name", display_name), + storage_type=StorageType.IN_MEMORY, + created_at=result.get("created_at"), + status="active", + ) + + async def delete_workspace( + self, + parent_graph_id: str, + workspace_name: str, + force: bool = False, + create_backup: bool = False, + ) -> bool: + """ + Delete a subgraph workspace. 
+ + Args: + parent_graph_id: Parent graph ID + workspace_name: Workspace name to delete + force: Force deletion even if subgraph contains data + create_backup: Create backup before deletion + + Returns: + True if deleted successfully + """ + params = {"force": force, "create_backup": create_backup} + + response = await self.api.delete( + f"/v1/graphs/{parent_graph_id}/subgraphs/{workspace_name}", params=params + ) + + return response.status_code == 200 + + async def list_workspaces(self, parent_graph_id: str) -> List[SubgraphWorkspace]: + """ + List all subgraph workspaces for a parent graph. + + Args: + parent_graph_id: Parent graph ID + + Returns: + List of SubgraphWorkspace objects + """ + response = await self.api.get(f"/v1/graphs/{parent_graph_id}/subgraphs") + subgraphs = response.json() + + workspaces = [] + for subgraph in subgraphs: + workspaces.append( + SubgraphWorkspace( + graph_id=f"{parent_graph_id}_{subgraph['name']}", + parent_id=parent_graph_id, + name=subgraph["name"], + display_name=subgraph.get("display_name", ""), + storage_type=StorageType(subgraph.get("storage_type", "in_memory")), + created_at=subgraph.get("created_at", ""), + status=subgraph.get("status", "active"), + ) + ) + + return workspaces + + async def copy_data_from_main_graph( + self, + workspace_id: str, + parent_graph_id: str, + node_types: List[str], + filters: Dict[str, Any] = None, + ) -> int: + """ + Copy data from main graph to subgraph workspace. + + Queries nodes from main graph and creates them in subgraph. 
+ + Args: + workspace_id: Subgraph workspace ID (e.g., kg123_workspace) + parent_graph_id: Parent graph ID to query from + node_types: List of node types to copy (e.g., ["Element", "Period", "Unit"]) + filters: Optional filters for querying (e.g., {"period_end": "2024-12-31"}) + + Returns: + Number of nodes copied + """ + if not self.query: + raise ValueError("Query client required for data transfer") + + total_copied = 0 + + for node_type in node_types: + # Build query for main graph + where_clause = "" + if filters: + conditions = [] + for key, value in filters.items(): + conditions.append(f"n.{key} = '{value}'") + where_clause = f"WHERE {' AND '.join(conditions)}" + + # Query from main graph + query_cypher = f""" + MATCH (n:{node_type}) + {where_clause} + RETURN n + """ + + result = await self.query.query(parent_graph_id, query_cypher) + + if result and result.data: + # Batch create in subgraph + for batch in self._batch_nodes(result.data, 100): + create_cypher = self._build_batch_create_cypher(node_type, batch) + await self.query.query(workspace_id, create_cypher) + total_copied += len(batch) + + return total_copied + + async def copy_facts_with_aspects( + self, + workspace_id: str, + parent_graph_id: str, + fact_set_ids: List[str] = None, + period_start: str = None, + period_end: str = None, + entity_id: str = None, + ) -> int: + """ + Copy facts with all their aspects (element, period, unit, dimensions). + + This is optimized for copying complete fact data with relationships. 
+ + Args: + workspace_id: Subgraph workspace ID + parent_graph_id: Parent graph ID + fact_set_ids: Optional list of fact set IDs to copy + period_start: Optional start date filter + period_end: Optional end date filter + entity_id: Optional entity filter + + Returns: + Number of facts copied + """ + if not self.query: + raise ValueError("Query client required for fact transfer") + + # Build WHERE clause + conditions = [] + params = {} + + if fact_set_ids: + conditions.append("fs.identifier IN $fact_set_ids") + params["fact_set_ids"] = fact_set_ids + + if period_start and period_end: + conditions.append("p.end_date >= $period_start AND p.end_date <= $period_end") + params["period_start"] = period_start + params["period_end"] = period_end + + if entity_id: + conditions.append("e.identifier = $entity_id") + params["entity_id"] = entity_id + + where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else "" + + # Query facts with all relationships from main graph + query_cypher = f""" + MATCH (fs:FactSet)-[:FACT_SET_CONTAINS_FACT]->(f:Fact) + MATCH (f)-[:FACT_HAS_ELEMENT]->(el:Element) + MATCH (f)-[:FACT_HAS_PERIOD]->(p:Period) + MATCH (f)-[:FACT_HAS_ENTITY]->(e:Entity) + OPTIONAL MATCH (f)-[:FACT_HAS_UNIT]->(u:Unit) + OPTIONAL MATCH (f)-[:FACT_HAS_DIMENSION]->(d:FactDimension) + {where_clause} + RETURN f, el, p, e, u, collect(d) as dimensions + """ + + result = await self.query.query(parent_graph_id, query_cypher, params) + + if not result or not result.data: + return 0 + + # Create nodes and relationships in subgraph + # This would be done in batches with proper MERGE statements + facts_copied = 0 + + for row in result.data: + # Create fact and all related nodes in subgraph + create_cypher = self._build_fact_create_cypher(row) + await self.query.query(workspace_id, create_cypher) + facts_copied += 1 + + return facts_copied + + async def export_to_parquet( + self, workspace_id: str, shared_filename: str, tables: List[str] = None + ) -> ExportResult: + """ + 
Export subgraph to parquet files. + + Each table is exported to a separate parquet file with the shared filename + for provenance tracking and incremental ingestion. + + Args: + workspace_id: Subgraph workspace ID + shared_filename: Shared filename for all tables (e.g., "report_nvda_2024q4.parquet") + tables: List of tables to export (default: all report tables) + + Returns: + ExportResult with file details + """ + if tables is None: + tables = [ + "Report", + "ReportSection", + "FactSet", + "Fact", + "Structure", + "Association", + "Element", + "Period", + "Unit", + ] + + request_body = { + "shared_filename": shared_filename, + "export_all_tables": False, + "tables": tables, + } + + response = await self.api.post( + f"/v1/graphs/{workspace_id}/export", json=request_body + ) + + result = response.json() + + return ExportResult( + shared_filename=shared_filename, + files_exported=result.get("files_exported", []), + total_rows=sum(f.get("row_count", 0) for f in result.get("files_exported", [])), + execution_time_ms=result.get("execution_time_ms", 0), + ) + + async def publish_to_main_graph( + self, + workspace_id: str, + parent_graph_id: str, + shared_filename: str, + delete_workspace: bool = True, + ) -> PublishResult: + """ + Publish workspace to main graph via incremental ingestion. + + This is the complete publish flow: + 1. Export subgraph to parquet + 2. Incremental ingest to main graph (filtered by filename) + 3. 
Delete workspace (optional) + + Args: + workspace_id: Subgraph workspace ID + parent_graph_id: Parent graph to publish to + shared_filename: Filename to use for export/ingest + delete_workspace: Whether to delete workspace after publish + + Returns: + PublishResult with statistics + """ + # Step 1: Export to parquet + await self.export_to_parquet(workspace_id, shared_filename) + + # Step 2: Incremental ingest to main graph + ingest_request = { + "file_names": [shared_filename], # Filter to only this report + "ignore_errors": True, + "rebuild": False, + } + + response = await self.api.post( + f"/v1/graphs/{parent_graph_id}/tables/ingest", json=ingest_request + ) + + ingest_result = response.json() + + # Step 3: Delete workspace (optional) + if delete_workspace: + workspace_name = workspace_id.split("_")[-1] + await self.delete_workspace(parent_graph_id, workspace_name) + + return PublishResult( + nodes_created=ingest_result.get("nodes_created", 0), + relationships_created=ingest_result.get("relationships_created", 0), + execution_time_ms=ingest_result.get("execution_time_ms", 0), + success=ingest_result.get("status") == "success", + ) + + def _batch_nodes(self, nodes: List[Dict], batch_size: int): + """Helper to batch nodes for efficient creation""" + for i in range(0, len(nodes), batch_size): + yield nodes[i : i + batch_size] + + def _build_batch_create_cypher(self, node_type: str, nodes: List[Dict]) -> str: + """Helper to build batch CREATE cypher""" + creates = [] + for i, node_data in enumerate(nodes): + node = node_data.get("n", node_data) + props = json.dumps(node).replace('"', "'") + creates.append(f"CREATE (n{i}:{node_type} {props})") + + return "\n".join(creates) + + def _build_fact_create_cypher(self, row: Dict) -> str: + """Helper to build cypher for creating fact with all relationships""" + # This would build proper MERGE statements for fact and all related nodes + # Simplified for illustration + return f""" + MERGE (f:Fact {{identifier: 
'{row["f"]["identifier"]}'}}) + SET f = {json.dumps(row["f"]).replace('"', "'")} + + MERGE (el:Element {{identifier: '{row["el"]["identifier"]}'}}) + SET el = {json.dumps(row["el"]).replace('"', "'")} + + MERGE (f)-[:FACT_HAS_ELEMENT]->(el) + """ + + async def fork_from_parent_with_sse( + self, + workspace_id: str, + parent_graph_id: str, + fork_options: Optional[Dict[str, Any]] = None, + progress_callback: Optional[callable] = None, + ) -> Dict[str, Any]: + """ + Fork data from parent graph with SSE progress monitoring. + + This method triggers a server-side fork operation and monitors progress + via Server-Sent Events (SSE) for real-time updates on large operations. + + Args: + workspace_id: Target subgraph workspace ID + parent_graph_id: Source parent graph ID + fork_options: Options for selective forking: + - tables: List of tables or "all" + - period_filter: Date range filter + - entity_filter: Entity IDs to include + - exclude_patterns: Tables to exclude (e.g., ["Report*"]) + progress_callback: Optional callback for progress updates + + Returns: + Fork result with statistics + + Example: + >>> async def on_progress(event): + ... print(f"Progress: {event['message']}") + >>> + >>> result = await workspace_client.fork_from_parent_with_sse( + ... workspace_id="kg123_dev", + ... parent_graph_id="kg123", + ... fork_options={ + ... "tables": "all", + ... "exclude_patterns": ["Report*"], + ... "period_filter": {"start": "2024-01-01", "end": "2024-12-31"} + ... }, + ... progress_callback=on_progress + ... 
) + """ + # Start fork operation and get SSE endpoint + fork_request = { + "operation": "fork", + "source_graph_id": parent_graph_id, + "target_graph_id": workspace_id, + "options": fork_options or {}, + } + + # Initiate fork operation + response = await self.api.post( + f"/v1/graphs/{workspace_id}/operations/fork", json=fork_request + ) + + operation = response.json() + operation_id = operation.get("operation_id") + + if not operation_id: + # If no SSE available, fall back to synchronous wait + return operation + + # Connect to SSE endpoint for progress monitoring + sse_url = f"{self.api.base_url}/v1/operations/{operation_id}/stream" + headers = getattr(self.api, "_headers", {}) + + async with httpx.AsyncClient() as client: + async with client.stream("GET", sse_url, headers=headers) as response: + async for line in response.aiter_lines(): + if line.startswith("data: "): + data = line[6:].strip() + if data == "[DONE]": + break + + try: + event = json.loads(data) + + # Call progress callback if provided + if progress_callback: + await progress_callback(event) + + # Check for completion + if event.get("status") in ["completed", "failed"]: + return event + + except json.JSONDecodeError: + continue + + # Get final status if SSE ended without completion event + final_response = await self.api.get(f"/v1/operations/{operation_id}") + return final_response.json() + + async def write_view_to_workspace( + self, workspace_id: str, view_data: Dict[str, Any], report_name: str = None + ) -> str: + """ + Write view/report data to subgraph workspace. + + Creates Report, FactSet, Structure, and Association nodes. 
+ + Args: + workspace_id: Subgraph workspace ID + view_data: View data including facts, structures, associations + report_name: Optional report name + + Returns: + Report ID created + """ + if not self.query: + raise ValueError("Query client required for writing view data") + + import uuid + + report_id = str(uuid.uuid4()) + + # Create Report node + report_cypher = f""" + CREATE (r:Report {{ + identifier: '{report_id}', + name: '{report_name or "Draft Report"}', + status: 'draft', + created_at: datetime(), + ai_generated: false + }}) + RETURN r.identifier as report_id + """ + + await self.query.query(workspace_id, report_cypher) + + # Create FactSet and Facts + if "facts" in view_data: + factset_id = str(uuid.uuid4()) + factset_cypher = f""" + MATCH (r:Report {{identifier: '{report_id}'}}) + CREATE (fs:FactSet {{ + identifier: '{factset_id}', + name: 'View Facts' + }}) + CREATE (r)-[:REPORT_HAS_FACT_SET]->(fs) + """ + await self.query.query(workspace_id, factset_cypher) + + # Add facts to factset + for fact in view_data["facts"]: + fact_cypher = f""" + MATCH (fs:FactSet {{identifier: '{factset_id}'}}) + CREATE (f:Fact {{ + identifier: randomUUID(), + element_id: '{fact.get("element_id")}', + value: {fact.get("value", 0)}, + period_end: '{fact.get("period_end", "")}' + }}) + CREATE (fs)-[:FACT_SET_CONTAINS_FACT]->(f) + """ + await self.query.query(workspace_id, fact_cypher) + + # Create Structure and Associations + if "structures" in view_data: + for structure in view_data["structures"]: + struct_cypher = f""" + MATCH (r:Report {{identifier: '{report_id}'}}) + CREATE (s:Structure {{ + identifier: randomUUID(), + name: '{structure.get("name", "View Structure")}', + type: 'presentation' + }}) + CREATE (r)-[:REPORT_HAS_STRUCTURE]->(s) + """ + await self.query.query(workspace_id, struct_cypher) + + return report_id diff --git a/robosystems_client/extensions/table_ingest_client.py b/robosystems_client/extensions/table_ingest_client.py index de3ad27..8a14757 100644 --- 
a/robosystems_client/extensions/table_ingest_client.py +++ b/robosystems_client/extensions/table_ingest_client.py @@ -11,21 +11,21 @@ import logging import httpx -from ..api.tables.get_upload_url import ( - sync_detailed as get_upload_url, +from ..api.files.create_file_upload import ( + sync_detailed as create_file_upload, ) -from ..api.tables.update_file_status import ( - sync_detailed as update_file_status, +from ..api.files.update_file import ( + sync_detailed as update_file, ) from ..api.tables.list_tables import ( sync_detailed as list_tables, ) -from ..api.tables.ingest_tables import ( - sync_detailed as ingest_tables, +from ..api.materialization.materialize_graph import ( + sync_detailed as materialize_graph, ) from ..models.file_upload_request import FileUploadRequest from ..models.file_status_update import FileStatusUpdate -from ..models.bulk_ingest_request import BulkIngestRequest +from ..models.materialize_request import MaterializeRequest logger = logging.getLogger(__name__) @@ -167,17 +167,16 @@ def upload_parquet_file( ) upload_request = FileUploadRequest( - file_name=file_name, content_type="application/x-parquet" + file_name=file_name, content_type="application/x-parquet", table_name=table_name ) kwargs = { "graph_id": graph_id, - "table_name": table_name, "client": client, "body": upload_request, } - response = get_upload_url(**kwargs) + response = create_file_upload(**kwargs) if not response.parsed: error_msg = f"Failed to get upload URL (status: {response.status_code})" @@ -246,11 +245,11 @@ def upload_parquet_file( "body": status_update, } - update_response = update_file_status(**kwargs) + update_response = update_file(**kwargs) if not update_response.parsed: logger.error( - f"No parsed response from update_file_status. Status code: {update_response.status_code}" + f"No parsed response from update_file. 
Status code: {update_response.status_code}" ) return UploadResult( file_id=file_id, @@ -354,14 +353,16 @@ def ingest_all_tables( self, graph_id: str, options: Optional[IngestOptions] = None ) -> Dict[str, Any]: """ - Ingest all staging tables into the graph. + Materialize the graph from all staging tables. + + This rebuilds the complete graph database from the current state of DuckDB staging tables. Args: graph_id: The graph ID options: Ingest options Returns: - Dictionary with ingestion results + Dictionary with materialization results """ if options is None: options = IngestOptions() @@ -381,36 +382,36 @@ def ingest_all_tables( ) if options.on_progress: - options.on_progress("Starting table ingestion...") + options.on_progress("Starting table materialization...") - ingest_request = BulkIngestRequest( - ignore_errors=options.ignore_errors, rebuild=options.rebuild + materialize_request = MaterializeRequest( + ignore_errors=options.ignore_errors, rebuild=options.rebuild, force=True ) kwargs = { "graph_id": graph_id, "client": client, - "body": ingest_request, + "body": materialize_request, } - response = ingest_tables(**kwargs) + response = materialize_graph(**kwargs) if not response.parsed: - return {"success": False, "error": "Failed to ingest tables"} + return {"success": False, "error": "Failed to materialize graph"} result = { "success": True, "operation_id": getattr(response.parsed, "operation_id", None), - "message": getattr(response.parsed, "message", "Ingestion started"), + "message": getattr(response.parsed, "message", "Materialization started"), } if options.on_progress: - options.on_progress("✅ Table ingestion completed") + options.on_progress("✅ Graph materialization completed") return result except Exception as e: - logger.error(f"Failed to ingest tables: {e}") + logger.error(f"Failed to materialize graph: {e}") return {"success": False, "error": str(e)} def upload_and_ingest( diff --git a/robosystems_client/extensions/view_builder_client.py 
"""View Builder Client Extension

Client for building financial views and reports following the Financial Report Creator architecture.
Queries data from main graph, applies mappings, and generates views.
"""

from typing import List, Optional, Dict, Any
from dataclasses import dataclass
from enum import Enum
import pandas as pd
from datetime import datetime


class ViewSourceType(Enum):
  """How the raw fact data for a view is produced."""

  TRANSACTIONS = "transactions"  # Generate from transaction aggregation
  FACT_SET = "fact_set"  # Pivot existing facts


@dataclass
class ViewSource:
  """Source configuration for view generation."""

  type: ViewSourceType
  period_start: Optional[str] = None  # start date filter (YYYY-MM-DD)
  period_end: Optional[str] = None  # end date filter (YYYY-MM-DD)
  entity_id: Optional[str] = None  # restrict to a single entity
  fact_set_id: Optional[str] = None  # fact-set filter for FACT_SET sources


@dataclass
class ViewAxis:
  """Axis configuration for one side of the pivot table."""

  type: str  # "element", "period", "dimension"
  dimension_axis: Optional[str] = None  # dimension axis when type == "dimension"
  hierarchy_root: Optional[str] = None  # root element for subtotal hierarchies
  include_subtotals: bool = False


@dataclass
class ViewConfig:
  """Configuration for view presentation."""

  rows: List[ViewAxis]
  columns: List[ViewAxis]
  measures: Optional[List[str]] = None  # Fact value columns to include


@dataclass
class ViewResponse:
  """Response from view generation."""

  facts: pd.DataFrame
  metadata: Dict[str, Any]
  execution_time_ms: int
+ + Provides functionality to: + - Query trial balance from transactions + - Query existing facts with aspects + - Apply element mappings for aggregation + - Generate pivot table presentations + - Write views to subgraph workspaces + """ + + def __init__(self, query_client, element_mapping_client=None): + """ + Initialize with query client and optional element mapping client. + + Args: + query_client: RoboSystems query client + element_mapping_client: Optional ElementMappingClient for applying mappings + """ + self.query = query_client + self.element_mapping = element_mapping_client + + async def aggregate_trial_balance( + self, + graph_id: str, + period_start: str, + period_end: str, + entity_id: Optional[str] = None, + requested_dimensions: Optional[List[str]] = None, + ) -> pd.DataFrame: + """ + Aggregate transactions to trial balance (Mode 1: Transaction Aggregation). + + This queries transaction data from the main graph and aggregates it + to create a trial balance with debit/credit totals and net balances. 
+ + Args: + graph_id: Graph ID to query (main graph) + period_start: Start date (YYYY-MM-DD) + period_end: End date (YYYY-MM-DD) + entity_id: Optional entity filter + requested_dimensions: Optional dimension axes + + Returns: + DataFrame with trial balance data + """ + # Build WHERE clause + conditions = ["t.date >= $period_start", "t.date <= $period_end"] + params = {"period_start": period_start, "period_end": period_end} + + if entity_id: + conditions.append("e.identifier = $entity_id") + params["entity_id"] = entity_id + + where_clause = " AND ".join(conditions) + + # Query transaction data with aggregation + cypher = f""" + MATCH (e:Entity)-[:ENTITY_HAS_TRANSACTION]->(t:Transaction) + -[:TRANSACTION_HAS_LINE_ITEM]->(li:LineItem) + -[:LINE_ITEM_RELATES_TO_ELEMENT]->(elem:Element) + WHERE {where_clause} + + WITH elem, + sum(li.debit_amount) AS total_debits, + sum(li.credit_amount) AS total_credits + + RETURN elem.identifier AS element_id, + elem.uri AS element_uri, + elem.name AS element_name, + elem.classification AS element_classification, + elem.balance AS element_balance, + elem.period_type AS element_period_type, + total_debits, + total_credits, + total_debits - total_credits AS net_balance + ORDER BY elem.name + """ + + result = await self.query.query(graph_id, cypher, params) + + if not result or not result.data: + # Return empty DataFrame with expected columns + return pd.DataFrame( + columns=[ + "element_id", + "element_uri", + "element_name", + "element_classification", + "element_balance", + "element_period_type", + "total_debits", + "total_credits", + "net_balance", + ] + ) + + return pd.DataFrame(result.data) + + async def query_facts_with_aspects( + self, + graph_id: str, + fact_set_id: Optional[str] = None, + period_start: Optional[str] = None, + period_end: Optional[str] = None, + entity_id: Optional[str] = None, + requested_dimensions: Optional[List[str]] = None, + ) -> pd.DataFrame: + """ + Query existing facts with all aspects (Mode 2: Existing 
Facts). + + This queries pre-computed facts from the main graph with their + elements, periods, units, and dimensions. + + Args: + graph_id: Graph ID to query (main graph) + fact_set_id: Optional fact set filter + period_start: Optional start date filter + period_end: Optional end date filter + entity_id: Optional entity filter + requested_dimensions: Optional dimension axes to include + + Returns: + DataFrame with fact data and aspects + """ + # Build WHERE clause + conditions = [] + params = {} + + if fact_set_id: + conditions.append("fs.identifier = $fact_set_id") + params["fact_set_id"] = fact_set_id + + if period_start and period_end: + conditions.append("p.end_date >= $period_start") + conditions.append("p.end_date <= $period_end") + params["period_start"] = period_start + params["period_end"] = period_end + + if entity_id: + conditions.append("e.identifier = $entity_id") + params["entity_id"] = entity_id + + where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else "" + + # Query facts with all relationships + cypher = f""" + MATCH (fs:FactSet)-[:FACT_SET_CONTAINS_FACT]->(f:Fact) + MATCH (f)-[:FACT_HAS_ELEMENT]->(elem:Element) + MATCH (f)-[:FACT_HAS_PERIOD]->(p:Period) + OPTIONAL MATCH (f)-[:FACT_HAS_ENTITY]->(e:Entity) + OPTIONAL MATCH (f)-[:FACT_HAS_UNIT]->(u:Unit) + OPTIONAL MATCH (f)-[:FACT_HAS_DIMENSION]->(d:FactDimension) + {where_clause} + RETURN f.identifier AS fact_id, + f.numeric_value AS numeric_value, + f.value AS text_value, + elem.identifier AS element_id, + elem.uri AS element_uri, + elem.name AS element_name, + p.identifier AS period_id, + p.start_date AS period_start, + p.end_date AS period_end, + p.period_type AS period_type, + e.identifier AS entity_id, + e.name AS entity_name, + u.identifier AS unit_id, + u.name AS unit_name, + collect(DISTINCT { + "axis": d.axis_uri, + "member": d.member_uri + }) AS dimensions + """ + + result = await self.query.query(graph_id, cypher, params) + + if not result or not result.data: + return 
pd.DataFrame() + + # Convert to DataFrame and expand dimensions if needed + df = pd.DataFrame(result.data) + + # If requested_dimensions specified, filter/expand dimension columns + if requested_dimensions and not df.empty: + # This would expand dimension data into separate columns + # For now, keeping as nested structure + pass + + return df + + async def create_view( + self, + graph_id: str, + source: ViewSource, + view_config: Optional[ViewConfig] = None, + mapping_structure_id: Optional[str] = None, + workspace_id: Optional[str] = None, + ) -> ViewResponse: + """ + Create a financial view from source data. + + This is the main entry point for view generation. It: + 1. Queries source data (transactions or facts) + 2. Applies element mappings if specified + 3. Generates pivot table presentation if configured + 4. Optionally writes to subgraph workspace + + Args: + graph_id: Main graph ID to query data from + source: Source configuration (transactions or fact set) + view_config: Optional pivot table configuration + mapping_structure_id: Optional mapping to apply for aggregation + workspace_id: Optional subgraph to write results to + + Returns: + ViewResponse with generated view data + """ + start_time = datetime.now() + + # Step 1: Get source data + if source.type == ViewSourceType.TRANSACTIONS: + fact_data = await self.aggregate_trial_balance( + graph_id=graph_id, + period_start=source.period_start, + period_end=source.period_end, + entity_id=source.entity_id, + ) + source_type = "trial_balance_aggregation" + + elif source.type == ViewSourceType.FACT_SET: + fact_data = await self.query_facts_with_aspects( + graph_id=graph_id, + fact_set_id=source.fact_set_id, + period_start=source.period_start, + period_end=source.period_end, + entity_id=source.entity_id, + ) + source_type = "fact_set_query" + + else: + raise ValueError(f"Unsupported source type: {source.type}") + + # Step 2: Apply element mapping if specified + if mapping_structure_id and self.element_mapping: + 
# Get mapping structure from subgraph + mapping = await self.element_mapping.get_mapping_structure( + workspace_id or graph_id, mapping_structure_id + ) + if mapping: + fact_data = self.element_mapping.apply_element_mapping(fact_data, mapping) + + # Step 3: Generate pivot table if configured + if view_config: + fact_data = self._generate_pivot_table(fact_data, view_config) + + # Step 4: Write to workspace if specified + if workspace_id: + await self._write_to_workspace(workspace_id, fact_data, source_type) + + execution_time = int((datetime.now() - start_time).total_seconds() * 1000) + + return ViewResponse( + facts=fact_data, + metadata={ + "source": source_type, + "fact_count": len(fact_data), + "period_start": source.period_start, + "period_end": source.period_end, + "has_mapping": mapping_structure_id is not None, + "has_pivot": view_config is not None, + }, + execution_time_ms=execution_time, + ) + + def _generate_pivot_table( + self, fact_data: pd.DataFrame, view_config: ViewConfig + ) -> pd.DataFrame: + """ + Generate pivot table from fact data. 
+ + Args: + fact_data: Source fact DataFrame + view_config: Pivot configuration + + Returns: + Pivoted DataFrame + """ + if fact_data.empty: + return fact_data + + # Determine value column + value_col = ( + "numeric_value" if "numeric_value" in fact_data.columns else "net_balance" + ) + + # Build index (rows) and columns lists + index_cols = [] + for axis in view_config.rows: + if axis.type == "element": + index_cols.append("element_name") + elif axis.type == "period": + index_cols.append("period_end") + elif axis.type == "dimension" and axis.dimension_axis: + # This would map dimension to column name + index_cols.append(f"dim_{axis.dimension_axis}") + + column_cols = [] + for axis in view_config.columns: + if axis.type == "element": + column_cols.append("element_name") + elif axis.type == "period": + column_cols.append("period_end") + elif axis.type == "dimension" and axis.dimension_axis: + column_cols.append(f"dim_{axis.dimension_axis}") + + # Apply pivot_table + if index_cols and column_cols: + pivoted = pd.pivot_table( + fact_data, + values=value_col, + index=index_cols, + columns=column_cols, + aggfunc="sum", + fill_value=0, + ) + # Flatten multi-index if needed + if isinstance(pivoted.columns, pd.MultiIndex): + pivoted.columns = ["_".join(map(str, col)) for col in pivoted.columns] + pivoted = pivoted.reset_index() + + elif index_cols: + # Group by index only + pivoted = fact_data.groupby(index_cols)[value_col].sum().reset_index() + + else: + pivoted = fact_data + + # Add subtotals if requested + for axis in view_config.rows: + if axis.include_subtotals and axis.type == "element": + pivoted = self._add_subtotals(pivoted, axis.hierarchy_root) + + return pivoted + + def _add_subtotals( + self, df: pd.DataFrame, hierarchy_root: str = None + ) -> pd.DataFrame: + """ + Add subtotal rows to DataFrame. + + This would implement hierarchical subtotal logic. + Simplified for illustration. + """ + # This would: + # 1. Query hierarchy from graph + # 2. 
Group elements by parent + # 3. Calculate subtotals + # 4. Insert subtotal rows + return df + + async def _write_to_workspace( + self, workspace_id: str, fact_data: pd.DataFrame, source_type: str + ): + """ + Write view data to subgraph workspace. + + Args: + workspace_id: Subgraph workspace ID + fact_data: Fact DataFrame to write + source_type: Source type for metadata + """ + if fact_data.empty: + return + + # Create View node in workspace + import uuid + + view_id = str(uuid.uuid4()) + + cypher = f""" + CREATE (v:View {{ + identifier: '{view_id}', + source_type: '{source_type}', + created_at: datetime(), + fact_count: {len(fact_data)} + }}) + RETURN v.identifier as view_id + """ + + await self.query.query(workspace_id, cypher) + + # Write facts in batches + for batch_start in range(0, len(fact_data), 100): + batch_end = min(batch_start + 100, len(fact_data)) + batch = fact_data.iloc[batch_start:batch_end] + + # Build CREATE statements for batch + creates = [] + for _, row in batch.iterrows(): + fact_props = { + "identifier": str(uuid.uuid4()), + "element_id": row.get("element_id", ""), + "value": float(row.get("net_balance", 0)), + "period_end": row.get("period_end", ""), + } + + # Convert to Cypher properties string + props_str = ", ".join( + [ + f"{k}: '{v}'" if isinstance(v, str) else f"{k}: {v}" + for k, v in fact_props.items() + ] + ) + + creates.append(f"CREATE (f:ViewFact {{{props_str}}})") + + batch_cypher = "\n".join(creates) + await self.query.query(workspace_id, batch_cypher) + + async def build_fact_grid( + self, + graph_id: str, + view_source: ViewSource, + view_config: ViewConfig, + workspace_id: Optional[str] = None, + ) -> Dict[str, Any]: + """ + Build a Fact Grid data structure for flexible presentation. + + This follows the hypercube model from the architecture document, + creating a multidimensional structure that can generate multiple + presentation formats. 
+ + Args: + graph_id: Main graph to query data from + view_source: Source configuration + view_config: View configuration + workspace_id: Optional workspace to write to + + Returns: + Fact Grid structure with dimensions, measures, and hierarchies + """ + # Get base fact data + view_response = await self.create_view( + graph_id=graph_id, + source=view_source, + view_config=view_config, + workspace_id=workspace_id, + ) + + fact_data = view_response.facts + + # Build Fact Grid structure + fact_grid = { + "dimensions": self._extract_dimensions(fact_data), + "measures": self._extract_measures(fact_data), + "hierarchies": await self._build_hierarchies(graph_id, fact_data), + "facts": fact_data.to_dict("records"), + "metadata": { + "fact_count": len(fact_data), + "source": view_source.type.value, + "created_at": datetime.now().isoformat(), + **view_response.metadata, + }, + } + + return fact_grid + + def _extract_dimensions(self, fact_data: pd.DataFrame) -> List[Dict[str, Any]]: + """Extract dimensions from fact data""" + dimensions = [] + + # Time dimension + if "period_end" in fact_data.columns: + dimensions.append( + { + "name": "Time", + "type": "temporal", + "values": fact_data["period_end"].unique().tolist(), + } + ) + + # Element dimension + if "element_name" in fact_data.columns: + dimensions.append( + { + "name": "Element", + "type": "hierarchical", + "values": fact_data["element_name"].unique().tolist(), + } + ) + + # Entity dimension + if "entity_name" in fact_data.columns: + dimensions.append( + { + "name": "Entity", + "type": "categorical", + "values": fact_data["entity_name"].unique().tolist(), + } + ) + + return dimensions + + def _extract_measures(self, fact_data: pd.DataFrame) -> List[Dict[str, Any]]: + """Extract measures from fact data""" + measures = [] + + # Numeric measures + for col in ["net_balance", "numeric_value", "total_debits", "total_credits"]: + if col in fact_data.columns: + measures.append( + {"name": col, "type": "numeric", "aggregation": 
"sum", "format": "currency"} + ) + + return measures + + async def _build_hierarchies( + self, graph_id: str, fact_data: pd.DataFrame + ) -> Dict[str, Any]: + """Build element hierarchies from graph""" + if "element_id" not in fact_data.columns: + return {} + + element_ids = fact_data["element_id"].unique().tolist() + + # Query element hierarchy from graph + cypher = """ + MATCH (e:Element) + WHERE e.identifier IN $element_ids + OPTIONAL MATCH (e)-[:ELEMENT_HAS_PARENT]->(parent:Element) + RETURN e.identifier as element_id, + e.name as element_name, + parent.identifier as parent_id, + parent.name as parent_name + """ + + result = await self.query.query(graph_id, cypher, {"element_ids": element_ids}) + + if not result or not result.data: + return {} + + # Build hierarchy tree + hierarchy = {} + for row in result.data: + element_id = row["element_id"] + parent_id = row.get("parent_id") + + if parent_id: + if parent_id not in hierarchy: + hierarchy[parent_id] = {"name": row["parent_name"], "children": []} + hierarchy[parent_id]["children"].append(element_id) + + if element_id not in hierarchy: + hierarchy[element_id] = {"name": row["element_name"], "children": []} + + return hierarchy diff --git a/robosystems_client/models/__init__.py b/robosystems_client/models/__init__.py index ff62e24..fad8094 100644 --- a/robosystems_client/models/__init__.py +++ b/robosystems_client/models/__init__.py @@ -40,8 +40,6 @@ from .batch_agent_request import BatchAgentRequest from .batch_agent_response import BatchAgentResponse from .billing_customer import BillingCustomer -from .bulk_ingest_request import BulkIngestRequest -from .bulk_ingest_response import BulkIngestResponse from .cancel_operation_response_canceloperation import ( CancelOperationResponseCanceloperation, ) @@ -69,6 +67,7 @@ from .create_repository_subscription_request import CreateRepositorySubscriptionRequest from .create_subgraph_request import CreateSubgraphRequest from .create_subgraph_request_metadata_type_0 import 
CreateSubgraphRequestMetadataType0 +from .create_view_request import CreateViewRequest from .credit_limits import CreditLimits from .credit_summary import CreditSummary from .credit_summary_operation_breakdown import CreditSummaryOperationBreakdown @@ -96,6 +95,7 @@ from .enhanced_credit_transaction_response_metadata import ( EnhancedCreditTransactionResponseMetadata, ) +from .enhanced_file_status_layers import EnhancedFileStatusLayers from .error_response import ErrorResponse from .exchange_token_request import ExchangeTokenRequest from .exchange_token_request_metadata_type_0 import ExchangeTokenRequestMetadataType0 @@ -103,7 +103,9 @@ from .execute_cypher_query_response_200_data_item import ( ExecuteCypherQueryResponse200DataItem, ) +from .fact_detail import FactDetail from .file_info import FileInfo +from .file_layer_status import FileLayerStatus from .file_status_update import FileStatusUpdate from .file_upload_request import FileUploadRequest from .file_upload_response import FileUploadResponse @@ -158,6 +160,9 @@ from .list_table_files_response import ListTableFilesResponse from .login_request import LoginRequest from .logout_user_response_logoutuser import LogoutUserResponseLogoutuser +from .materialize_request import MaterializeRequest +from .materialize_response import MaterializeResponse +from .materialize_status_response import MaterializeStatusResponse from .mcp_tool_call import MCPToolCall from .mcp_tool_call_arguments import MCPToolCallArguments from .mcp_tools_response import MCPToolsResponse @@ -220,6 +225,8 @@ from .reset_password_request import ResetPasswordRequest from .reset_password_validate_response import ResetPasswordValidateResponse from .response_mode import ResponseMode +from .save_view_request import SaveViewRequest +from .save_view_response import SaveViewResponse from .schema_export_response import SchemaExportResponse from .schema_export_response_data_stats_type_0 import SchemaExportResponseDataStatsType0 from 
.schema_export_response_schema_definition_type_0 import ( @@ -250,6 +257,7 @@ from .storage_limit_response import StorageLimitResponse from .storage_limits import StorageLimits from .storage_summary import StorageSummary +from .structure_detail import StructureDetail from .subgraph_quota_response import SubgraphQuotaResponse from .subgraph_response import SubgraphResponse from .subgraph_response_metadata_type_0 import SubgraphResponseMetadataType0 @@ -265,7 +273,6 @@ SyncConnectionResponseSyncconnection, ) from .table_info import TableInfo -from .table_ingest_result import TableIngestResult from .table_list_response import TableListResponse from .table_query_request import TableQueryRequest from .table_query_response import TableQueryResponse @@ -273,9 +280,7 @@ from .transaction_summary_response import TransactionSummaryResponse from .upcoming_invoice import UpcomingInvoice from .update_api_key_request import UpdateAPIKeyRequest -from .update_file_status_response_updatefilestatus import ( - UpdateFileStatusResponseUpdatefilestatus, -) +from .update_file_response_updatefile import UpdateFileResponseUpdatefile from .update_member_role_request import UpdateMemberRoleRequest from .update_org_request import UpdateOrgRequest from .update_password_request import UpdatePasswordRequest @@ -284,6 +289,12 @@ from .user_graphs_response import UserGraphsResponse from .user_response import UserResponse from .validation_error import ValidationError +from .view_axis_config import ViewAxisConfig +from .view_axis_config_element_labels_type_0 import ViewAxisConfigElementLabelsType0 +from .view_axis_config_member_labels_type_0 import ViewAxisConfigMemberLabelsType0 +from .view_config import ViewConfig +from .view_source import ViewSource +from .view_source_type import ViewSourceType __all__ = ( "AccountInfo", @@ -322,8 +333,6 @@ "BatchAgentRequest", "BatchAgentResponse", "BillingCustomer", - "BulkIngestRequest", - "BulkIngestResponse", "CancelOperationResponseCanceloperation", 
"CheckCreditBalanceResponseCheckcreditbalance", "CheckoutResponse", @@ -347,6 +356,7 @@ "CreateRepositorySubscriptionRequest", "CreateSubgraphRequest", "CreateSubgraphRequestMetadataType0", + "CreateViewRequest", "CreditLimits", "CreditSummary", "CreditSummaryOperationBreakdown", @@ -368,12 +378,15 @@ "EmailVerificationRequest", "EnhancedCreditTransactionResponse", "EnhancedCreditTransactionResponseMetadata", + "EnhancedFileStatusLayers", "ErrorResponse", "ExchangeTokenRequest", "ExchangeTokenRequestMetadataType0", "ExecuteCypherQueryResponse200", "ExecuteCypherQueryResponse200DataItem", + "FactDetail", "FileInfo", + "FileLayerStatus", "FileStatusUpdate", "FileUploadRequest", "FileUploadResponse", @@ -418,6 +431,9 @@ "ListTableFilesResponse", "LoginRequest", "LogoutUserResponseLogoutuser", + "MaterializeRequest", + "MaterializeResponse", + "MaterializeStatusResponse", "MCPToolCall", "MCPToolCallArguments", "MCPToolsResponse", @@ -470,6 +486,8 @@ "ResetPasswordRequest", "ResetPasswordValidateResponse", "ResponseMode", + "SaveViewRequest", + "SaveViewResponse", "SchemaExportResponse", "SchemaExportResponseDataStatsType0", "SchemaExportResponseSchemaDefinitionType0", @@ -494,6 +512,7 @@ "StorageLimitResponse", "StorageLimits", "StorageSummary", + "StructureDetail", "SubgraphQuotaResponse", "SubgraphResponse", "SubgraphResponseMetadataType0", @@ -505,7 +524,6 @@ "SyncConnectionRequestSyncOptionsType0", "SyncConnectionResponseSyncconnection", "TableInfo", - "TableIngestResult", "TableListResponse", "TableQueryRequest", "TableQueryResponse", @@ -513,7 +531,7 @@ "TransactionSummaryResponse", "UpcomingInvoice", "UpdateAPIKeyRequest", - "UpdateFileStatusResponseUpdatefilestatus", + "UpdateFileResponseUpdatefile", "UpdateMemberRoleRequest", "UpdateOrgRequest", "UpdatePasswordRequest", @@ -522,4 +540,10 @@ "UserGraphsResponse", "UserResponse", "ValidationError", + "ViewAxisConfig", + "ViewAxisConfigElementLabelsType0", + "ViewAxisConfigMemberLabelsType0", + "ViewConfig", + 
"ViewSource", + "ViewSourceType", ) diff --git a/robosystems_client/models/account_info.py b/robosystems_client/models/account_info.py index 0b21e44..55deb07 100644 --- a/robosystems_client/models/account_info.py +++ b/robosystems_client/models/account_info.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/agent_list_response.py b/robosystems_client/models/agent_list_response.py index c9a2d1e..3388a89 100644 --- a/robosystems_client/models/agent_list_response.py +++ b/robosystems_client/models/agent_list_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -20,7 +22,7 @@ class AgentListResponse: total (int): Total number of agents """ - agents: "AgentListResponseAgents" + agents: AgentListResponseAgents total: int additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/agent_list_response_agents.py b/robosystems_client/models/agent_list_response_agents.py index 730f050..dd28ce7 100644 --- a/robosystems_client/models/agent_list_response_agents.py +++ b/robosystems_client/models/agent_list_response_agents.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -17,7 +19,7 @@ class AgentListResponseAgents: """Dictionary of available agents with metadata""" - additional_properties: dict[str, "AgentListResponseAgentsAdditionalProperty"] = ( + additional_properties: dict[str, AgentListResponseAgentsAdditionalProperty] = ( _attrs_field(init=False, factory=dict) ) @@ -52,11 +54,11 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> "AgentListResponseAgentsAdditionalProperty": + def 
__getitem__(self, key: str) -> AgentListResponseAgentsAdditionalProperty: return self.additional_properties[key] def __setitem__( - self, key: str, value: "AgentListResponseAgentsAdditionalProperty" + self, key: str, value: AgentListResponseAgentsAdditionalProperty ) -> None: self.additional_properties[key] = value diff --git a/robosystems_client/models/agent_list_response_agents_additional_property.py b/robosystems_client/models/agent_list_response_agents_additional_property.py index fe4481b..27c9e9b 100644 --- a/robosystems_client/models/agent_list_response_agents_additional_property.py +++ b/robosystems_client/models/agent_list_response_agents_additional_property.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/agent_message.py b/robosystems_client/models/agent_message.py index cd353bf..0f3dd9d 100644 --- a/robosystems_client/models/agent_message.py +++ b/robosystems_client/models/agent_message.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -18,12 +20,12 @@ class AgentMessage: Attributes: role (str): Message role (user/assistant) content (str): Message content - timestamp (Union[None, Unset, datetime.datetime]): Message timestamp + timestamp (datetime.datetime | None | Unset): Message timestamp """ role: str content: str - timestamp: Union[None, Unset, datetime.datetime] = UNSET + timestamp: datetime.datetime | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -31,7 +33,7 @@ def to_dict(self) -> dict[str, Any]: content = self.content - timestamp: Union[None, Unset, str] + timestamp: None | str | Unset if isinstance(self.timestamp, 
Unset): timestamp = UNSET elif isinstance(self.timestamp, datetime.datetime): @@ -59,7 +61,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: content = d.pop("content") - def _parse_timestamp(data: object) -> Union[None, Unset, datetime.datetime]: + def _parse_timestamp(data: object) -> datetime.datetime | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -70,9 +72,9 @@ def _parse_timestamp(data: object) -> Union[None, Unset, datetime.datetime]: timestamp_type_0 = isoparse(data) return timestamp_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, Unset, datetime.datetime], data) + return cast(datetime.datetime | None | Unset, data) timestamp = _parse_timestamp(d.pop("timestamp", UNSET)) diff --git a/robosystems_client/models/agent_metadata_response.py b/robosystems_client/models/agent_metadata_response.py index 2bed699..b08cd27 100644 --- a/robosystems_client/models/agent_metadata_response.py +++ b/robosystems_client/models/agent_metadata_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -20,8 +22,8 @@ class AgentMetadataResponse: capabilities (list[str]): Agent capabilities supported_modes (list[str]): Supported execution modes requires_credits (bool): Whether agent requires credits - author (Union[None, Unset, str]): Agent author - tags (Union[Unset, list[str]]): Agent tags + author (None | str | Unset): Agent author + tags (list[str] | Unset): Agent tags """ name: str @@ -30,8 +32,8 @@ class AgentMetadataResponse: capabilities: list[str] supported_modes: list[str] requires_credits: bool - author: Union[None, Unset, str] = UNSET - tags: Union[Unset, list[str]] = UNSET + author: None | str | Unset = UNSET + tags: list[str] | Unset = UNSET 
additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -47,13 +49,13 @@ def to_dict(self) -> dict[str, Any]: requires_credits = self.requires_credits - author: Union[None, Unset, str] + author: None | str | Unset if isinstance(self.author, Unset): author = UNSET else: author = self.author - tags: Union[Unset, list[str]] = UNSET + tags: list[str] | Unset = UNSET if not isinstance(self.tags, Unset): tags = self.tags @@ -91,12 +93,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: requires_credits = d.pop("requires_credits") - def _parse_author(data: object) -> Union[None, Unset, str]: + def _parse_author(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) author = _parse_author(d.pop("author", UNSET)) diff --git a/robosystems_client/models/agent_recommendation.py b/robosystems_client/models/agent_recommendation.py index f01a3d5..62e01a3 100644 --- a/robosystems_client/models/agent_recommendation.py +++ b/robosystems_client/models/agent_recommendation.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -18,14 +20,14 @@ class AgentRecommendation: agent_name (str): Agent display name confidence (float): Confidence score (0-1) capabilities (list[str]): Agent capabilities - reason (Union[None, Unset, str]): Reason for recommendation + reason (None | str | Unset): Reason for recommendation """ agent_type: str agent_name: str confidence: float capabilities: list[str] - reason: Union[None, Unset, str] = UNSET + reason: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> 
dict[str, Any]: @@ -37,7 +39,7 @@ def to_dict(self) -> dict[str, Any]: capabilities = self.capabilities - reason: Union[None, Unset, str] + reason: None | str | Unset if isinstance(self.reason, Unset): reason = UNSET else: @@ -69,12 +71,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: capabilities = cast(list[str], d.pop("capabilities")) - def _parse_reason(data: object) -> Union[None, Unset, str]: + def _parse_reason(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) reason = _parse_reason(d.pop("reason", UNSET)) diff --git a/robosystems_client/models/agent_recommendation_request.py b/robosystems_client/models/agent_recommendation_request.py index 9788004..6f29f69 100644 --- a/robosystems_client/models/agent_recommendation_request.py +++ b/robosystems_client/models/agent_recommendation_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -21,11 +23,11 @@ class AgentRecommendationRequest: Attributes: query (str): Query to analyze - context (Union['AgentRecommendationRequestContextType0', None, Unset]): Additional context + context (AgentRecommendationRequestContextType0 | None | Unset): Additional context """ query: str - context: Union["AgentRecommendationRequestContextType0", None, Unset] = UNSET + context: AgentRecommendationRequestContextType0 | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -35,7 +37,7 @@ def to_dict(self) -> dict[str, Any]: query = self.query - context: Union[None, Unset, dict[str, Any]] + context: dict[str, Any] | None | Unset if 
isinstance(self.context, Unset): context = UNSET elif isinstance(self.context, AgentRecommendationRequestContextType0): @@ -66,7 +68,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: def _parse_context( data: object, - ) -> Union["AgentRecommendationRequestContextType0", None, Unset]: + ) -> AgentRecommendationRequestContextType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -77,9 +79,9 @@ def _parse_context( context_type_0 = AgentRecommendationRequestContextType0.from_dict(data) return context_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["AgentRecommendationRequestContextType0", None, Unset], data) + return cast(AgentRecommendationRequestContextType0 | None | Unset, data) context = _parse_context(d.pop("context", UNSET)) diff --git a/robosystems_client/models/agent_recommendation_request_context_type_0.py b/robosystems_client/models/agent_recommendation_request_context_type_0.py index f96bd7b..66697c4 100644 --- a/robosystems_client/models/agent_recommendation_request_context_type_0.py +++ b/robosystems_client/models/agent_recommendation_request_context_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/agent_recommendation_response.py b/robosystems_client/models/agent_recommendation_response.py index 40c157f..f67d536 100644 --- a/robosystems_client/models/agent_recommendation_response.py +++ b/robosystems_client/models/agent_recommendation_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -16,11 +18,11 @@ class AgentRecommendationResponse: """Response for agent recommendations. 
Attributes: - recommendations (list['AgentRecommendation']): List of agent recommendations sorted by confidence + recommendations (list[AgentRecommendation]): List of agent recommendations sorted by confidence query (str): The analyzed query """ - recommendations: list["AgentRecommendation"] + recommendations: list[AgentRecommendation] query: str additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/agent_request.py b/robosystems_client/models/agent_request.py index e203122..5f03022 100644 --- a/robosystems_client/models/agent_request.py +++ b/robosystems_client/models/agent_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -22,27 +24,26 @@ class AgentRequest: Attributes: message (str): The query or message to process - history (Union[Unset, list['AgentMessage']]): Conversation history - context (Union['AgentRequestContextType0', None, Unset]): Additional context for analysis (e.g., enable_rag, + history (list[AgentMessage] | Unset): Conversation history + context (AgentRequestContextType0 | None | Unset): Additional context for analysis (e.g., enable_rag, include_schema) - mode (Union[AgentMode, None, Unset]): Execution mode Default: AgentMode.STANDARD. - agent_type (Union[None, Unset, str]): Specific agent type to use (optional) - selection_criteria (Union['SelectionCriteria', None, Unset]): Criteria for agent selection - force_extended_analysis (Union[Unset, bool]): Force extended analysis mode with comprehensive research Default: - False. - enable_rag (Union[Unset, bool]): Enable RAG context enrichment Default: True. - stream (Union[Unset, bool]): Enable streaming response Default: False. 
+ mode (AgentMode | None | Unset): Execution mode Default: AgentMode.STANDARD. + agent_type (None | str | Unset): Specific agent type to use (optional) + selection_criteria (None | SelectionCriteria | Unset): Criteria for agent selection + force_extended_analysis (bool | Unset): Force extended analysis mode with comprehensive research Default: False. + enable_rag (bool | Unset): Enable RAG context enrichment Default: True. + stream (bool | Unset): Enable streaming response Default: False. """ message: str - history: Union[Unset, list["AgentMessage"]] = UNSET - context: Union["AgentRequestContextType0", None, Unset] = UNSET - mode: Union[AgentMode, None, Unset] = AgentMode.STANDARD - agent_type: Union[None, Unset, str] = UNSET - selection_criteria: Union["SelectionCriteria", None, Unset] = UNSET - force_extended_analysis: Union[Unset, bool] = False - enable_rag: Union[Unset, bool] = True - stream: Union[Unset, bool] = False + history: list[AgentMessage] | Unset = UNSET + context: AgentRequestContextType0 | None | Unset = UNSET + mode: AgentMode | None | Unset = AgentMode.STANDARD + agent_type: None | str | Unset = UNSET + selection_criteria: None | SelectionCriteria | Unset = UNSET + force_extended_analysis: bool | Unset = False + enable_rag: bool | Unset = True + stream: bool | Unset = False additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -51,14 +52,14 @@ def to_dict(self) -> dict[str, Any]: message = self.message - history: Union[Unset, list[dict[str, Any]]] = UNSET + history: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.history, Unset): history = [] for history_item_data in self.history: history_item = history_item_data.to_dict() history.append(history_item) - context: Union[None, Unset, dict[str, Any]] + context: dict[str, Any] | None | Unset if isinstance(self.context, Unset): context = UNSET elif isinstance(self.context, AgentRequestContextType0): @@ -66,7 +67,7 @@ def 
to_dict(self) -> dict[str, Any]: else: context = self.context - mode: Union[None, Unset, str] + mode: None | str | Unset if isinstance(self.mode, Unset): mode = UNSET elif isinstance(self.mode, AgentMode): @@ -74,13 +75,13 @@ def to_dict(self) -> dict[str, Any]: else: mode = self.mode - agent_type: Union[None, Unset, str] + agent_type: None | str | Unset if isinstance(self.agent_type, Unset): agent_type = UNSET else: agent_type = self.agent_type - selection_criteria: Union[None, Unset, dict[str, Any]] + selection_criteria: dict[str, Any] | None | Unset if isinstance(self.selection_criteria, Unset): selection_criteria = UNSET elif isinstance(self.selection_criteria, SelectionCriteria): @@ -129,14 +130,16 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) message = d.pop("message") - history = [] _history = d.pop("history", UNSET) - for history_item_data in _history or []: - history_item = AgentMessage.from_dict(history_item_data) + history: list[AgentMessage] | Unset = UNSET + if _history is not UNSET: + history = [] + for history_item_data in _history: + history_item = AgentMessage.from_dict(history_item_data) - history.append(history_item) + history.append(history_item) - def _parse_context(data: object) -> Union["AgentRequestContextType0", None, Unset]: + def _parse_context(data: object) -> AgentRequestContextType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -147,13 +150,13 @@ def _parse_context(data: object) -> Union["AgentRequestContextType0", None, Unse context_type_0 = AgentRequestContextType0.from_dict(data) return context_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["AgentRequestContextType0", None, Unset], data) + return cast(AgentRequestContextType0 | None | Unset, data) context = _parse_context(d.pop("context", UNSET)) - def _parse_mode(data: object) -> Union[AgentMode, None, Unset]: + def _parse_mode(data: object) -> 
AgentMode | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -164,24 +167,22 @@ def _parse_mode(data: object) -> Union[AgentMode, None, Unset]: mode_type_0 = AgentMode(data) return mode_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[AgentMode, None, Unset], data) + return cast(AgentMode | None | Unset, data) mode = _parse_mode(d.pop("mode", UNSET)) - def _parse_agent_type(data: object) -> Union[None, Unset, str]: + def _parse_agent_type(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) agent_type = _parse_agent_type(d.pop("agent_type", UNSET)) - def _parse_selection_criteria( - data: object, - ) -> Union["SelectionCriteria", None, Unset]: + def _parse_selection_criteria(data: object) -> None | SelectionCriteria | Unset: if data is None: return data if isinstance(data, Unset): @@ -192,9 +193,9 @@ def _parse_selection_criteria( selection_criteria_type_0 = SelectionCriteria.from_dict(data) return selection_criteria_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["SelectionCriteria", None, Unset], data) + return cast(None | SelectionCriteria | Unset, data) selection_criteria = _parse_selection_criteria(d.pop("selection_criteria", UNSET)) diff --git a/robosystems_client/models/agent_request_context_type_0.py b/robosystems_client/models/agent_request_context_type_0.py index 71c3dcc..4ac1f01 100644 --- a/robosystems_client/models/agent_request_context_type_0.py +++ b/robosystems_client/models/agent_request_context_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/agent_response.py b/robosystems_client/models/agent_response.py index 8d09c64..6947653 100644 
--- a/robosystems_client/models/agent_response.py +++ b/robosystems_client/models/agent_response.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -28,27 +30,27 @@ class AgentResponse: content (str): The agent's response content agent_used (str): The agent type that handled the request mode_used (AgentMode): Agent execution modes. - metadata (Union['AgentResponseMetadataType0', None, Unset]): Response metadata including routing info - tokens_used (Union['AgentResponseTokensUsedType0', None, Unset]): Token usage statistics - confidence_score (Union[None, Unset, float]): Confidence score of the response (0.0-1.0 scale) - operation_id (Union[None, Unset, str]): Operation ID for SSE monitoring - is_partial (Union[Unset, bool]): Whether this is a partial response Default: False. - error_details (Union['AgentResponseErrorDetailsType0', None, Unset]): Error details if any - execution_time (Union[None, Unset, float]): Execution time in seconds - timestamp (Union[Unset, datetime.datetime]): Response timestamp + metadata (AgentResponseMetadataType0 | None | Unset): Response metadata including routing info + tokens_used (AgentResponseTokensUsedType0 | None | Unset): Token usage statistics + confidence_score (float | None | Unset): Confidence score of the response (0.0-1.0 scale) + operation_id (None | str | Unset): Operation ID for SSE monitoring + is_partial (bool | Unset): Whether this is a partial response Default: False. 
+ error_details (AgentResponseErrorDetailsType0 | None | Unset): Error details if any + execution_time (float | None | Unset): Execution time in seconds + timestamp (datetime.datetime | Unset): Response timestamp """ content: str agent_used: str mode_used: AgentMode - metadata: Union["AgentResponseMetadataType0", None, Unset] = UNSET - tokens_used: Union["AgentResponseTokensUsedType0", None, Unset] = UNSET - confidence_score: Union[None, Unset, float] = UNSET - operation_id: Union[None, Unset, str] = UNSET - is_partial: Union[Unset, bool] = False - error_details: Union["AgentResponseErrorDetailsType0", None, Unset] = UNSET - execution_time: Union[None, Unset, float] = UNSET - timestamp: Union[Unset, datetime.datetime] = UNSET + metadata: AgentResponseMetadataType0 | None | Unset = UNSET + tokens_used: AgentResponseTokensUsedType0 | None | Unset = UNSET + confidence_score: float | None | Unset = UNSET + operation_id: None | str | Unset = UNSET + is_partial: bool | Unset = False + error_details: AgentResponseErrorDetailsType0 | None | Unset = UNSET + execution_time: float | None | Unset = UNSET + timestamp: datetime.datetime | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -64,7 +66,7 @@ def to_dict(self) -> dict[str, Any]: mode_used = self.mode_used.value - metadata: Union[None, Unset, dict[str, Any]] + metadata: dict[str, Any] | None | Unset if isinstance(self.metadata, Unset): metadata = UNSET elif isinstance(self.metadata, AgentResponseMetadataType0): @@ -72,7 +74,7 @@ def to_dict(self) -> dict[str, Any]: else: metadata = self.metadata - tokens_used: Union[None, Unset, dict[str, Any]] + tokens_used: dict[str, Any] | None | Unset if isinstance(self.tokens_used, Unset): tokens_used = UNSET elif isinstance(self.tokens_used, AgentResponseTokensUsedType0): @@ -80,13 +82,13 @@ def to_dict(self) -> dict[str, Any]: else: tokens_used = self.tokens_used - confidence_score: Union[None, 
Unset, float] + confidence_score: float | None | Unset if isinstance(self.confidence_score, Unset): confidence_score = UNSET else: confidence_score = self.confidence_score - operation_id: Union[None, Unset, str] + operation_id: None | str | Unset if isinstance(self.operation_id, Unset): operation_id = UNSET else: @@ -94,7 +96,7 @@ def to_dict(self) -> dict[str, Any]: is_partial = self.is_partial - error_details: Union[None, Unset, dict[str, Any]] + error_details: dict[str, Any] | None | Unset if isinstance(self.error_details, Unset): error_details = UNSET elif isinstance(self.error_details, AgentResponseErrorDetailsType0): @@ -102,13 +104,13 @@ def to_dict(self) -> dict[str, Any]: else: error_details = self.error_details - execution_time: Union[None, Unset, float] + execution_time: float | None | Unset if isinstance(self.execution_time, Unset): execution_time = UNSET else: execution_time = self.execution_time - timestamp: Union[Unset, str] = UNSET + timestamp: str | Unset = UNSET if not isinstance(self.timestamp, Unset): timestamp = self.timestamp.isoformat() @@ -155,9 +157,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: mode_used = AgentMode(d.pop("mode_used")) - def _parse_metadata( - data: object, - ) -> Union["AgentResponseMetadataType0", None, Unset]: + def _parse_metadata(data: object) -> AgentResponseMetadataType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -168,15 +168,13 @@ def _parse_metadata( metadata_type_0 = AgentResponseMetadataType0.from_dict(data) return metadata_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["AgentResponseMetadataType0", None, Unset], data) + return cast(AgentResponseMetadataType0 | None | Unset, data) metadata = _parse_metadata(d.pop("metadata", UNSET)) - def _parse_tokens_used( - data: object, - ) -> Union["AgentResponseTokensUsedType0", None, Unset]: + def _parse_tokens_used(data: object) -> 
AgentResponseTokensUsedType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -187,27 +185,27 @@ def _parse_tokens_used( tokens_used_type_0 = AgentResponseTokensUsedType0.from_dict(data) return tokens_used_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["AgentResponseTokensUsedType0", None, Unset], data) + return cast(AgentResponseTokensUsedType0 | None | Unset, data) tokens_used = _parse_tokens_used(d.pop("tokens_used", UNSET)) - def _parse_confidence_score(data: object) -> Union[None, Unset, float]: + def _parse_confidence_score(data: object) -> float | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) confidence_score = _parse_confidence_score(d.pop("confidence_score", UNSET)) - def _parse_operation_id(data: object) -> Union[None, Unset, str]: + def _parse_operation_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) operation_id = _parse_operation_id(d.pop("operation_id", UNSET)) @@ -215,7 +213,7 @@ def _parse_operation_id(data: object) -> Union[None, Unset, str]: def _parse_error_details( data: object, - ) -> Union["AgentResponseErrorDetailsType0", None, Unset]: + ) -> AgentResponseErrorDetailsType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -226,23 +224,23 @@ def _parse_error_details( error_details_type_0 = AgentResponseErrorDetailsType0.from_dict(data) return error_details_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["AgentResponseErrorDetailsType0", None, Unset], data) + return cast(AgentResponseErrorDetailsType0 | None | Unset, data) error_details = _parse_error_details(d.pop("error_details", UNSET)) - 
def _parse_execution_time(data: object) -> Union[None, Unset, float]: + def _parse_execution_time(data: object) -> float | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) execution_time = _parse_execution_time(d.pop("execution_time", UNSET)) _timestamp = d.pop("timestamp", UNSET) - timestamp: Union[Unset, datetime.datetime] + timestamp: datetime.datetime | Unset if isinstance(_timestamp, Unset): timestamp = UNSET else: diff --git a/robosystems_client/models/agent_response_error_details_type_0.py b/robosystems_client/models/agent_response_error_details_type_0.py index 1eb5422..e210fee 100644 --- a/robosystems_client/models/agent_response_error_details_type_0.py +++ b/robosystems_client/models/agent_response_error_details_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/agent_response_metadata_type_0.py b/robosystems_client/models/agent_response_metadata_type_0.py index 1691b8a..b6365dd 100644 --- a/robosystems_client/models/agent_response_metadata_type_0.py +++ b/robosystems_client/models/agent_response_metadata_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/agent_response_tokens_used_type_0.py b/robosystems_client/models/agent_response_tokens_used_type_0.py index 1138f7c..5ee3fa7 100644 --- a/robosystems_client/models/agent_response_tokens_used_type_0.py +++ b/robosystems_client/models/agent_response_tokens_used_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/api_key_info.py b/robosystems_client/models/api_key_info.py index 4ffe9ea..ad84aa2 100644 --- 
a/robosystems_client/models/api_key_info.py +++ b/robosystems_client/models/api_key_info.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -19,9 +21,9 @@ class APIKeyInfo: prefix (str): API key prefix for identification is_active (bool): Whether the key is active created_at (str): Creation timestamp - description (Union[None, Unset, str]): API key description - last_used_at (Union[None, Unset, str]): Last used timestamp - expires_at (Union[None, Unset, str]): Expiration timestamp + description (None | str | Unset): API key description + last_used_at (None | str | Unset): Last used timestamp + expires_at (None | str | Unset): Expiration timestamp """ id: str @@ -29,9 +31,9 @@ class APIKeyInfo: prefix: str is_active: bool created_at: str - description: Union[None, Unset, str] = UNSET - last_used_at: Union[None, Unset, str] = UNSET - expires_at: Union[None, Unset, str] = UNSET + description: None | str | Unset = UNSET + last_used_at: None | str | Unset = UNSET + expires_at: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -45,19 +47,19 @@ def to_dict(self) -> dict[str, Any]: created_at = self.created_at - description: Union[None, Unset, str] + description: None | str | Unset if isinstance(self.description, Unset): description = UNSET else: description = self.description - last_used_at: Union[None, Unset, str] + last_used_at: None | str | Unset if isinstance(self.last_used_at, Unset): last_used_at = UNSET else: last_used_at = self.last_used_at - expires_at: Union[None, Unset, str] + expires_at: None | str | Unset if isinstance(self.expires_at, Unset): expires_at = UNSET else: @@ -96,30 +98,30 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 
created_at = d.pop("created_at") - def _parse_description(data: object) -> Union[None, Unset, str]: + def _parse_description(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) description = _parse_description(d.pop("description", UNSET)) - def _parse_last_used_at(data: object) -> Union[None, Unset, str]: + def _parse_last_used_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) last_used_at = _parse_last_used_at(d.pop("last_used_at", UNSET)) - def _parse_expires_at(data: object) -> Union[None, Unset, str]: + def _parse_expires_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) expires_at = _parse_expires_at(d.pop("expires_at", UNSET)) diff --git a/robosystems_client/models/api_keys_response.py b/robosystems_client/models/api_keys_response.py index e21f9c3..e8a832e 100644 --- a/robosystems_client/models/api_keys_response.py +++ b/robosystems_client/models/api_keys_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -16,10 +18,10 @@ class APIKeysResponse: """Response model for listing API keys. 
Attributes: - api_keys (list['APIKeyInfo']): List of user's API keys + api_keys (list[APIKeyInfo]): List of user's API keys """ - api_keys: list["APIKeyInfo"] + api_keys: list[APIKeyInfo] additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/auth_response.py b/robosystems_client/models/auth_response.py index 76ab8b5..9826124 100644 --- a/robosystems_client/models/auth_response.py +++ b/robosystems_client/models/auth_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -21,19 +23,19 @@ class AuthResponse: Attributes: user (AuthResponseUser): User information message (str): Success message - org (Union['AuthResponseOrgType0', None, Unset]): Organization information (personal org created automatically - on registration) - token (Union[None, Unset, str]): JWT authentication token (optional for cookie-based auth) - expires_in (Union[None, Unset, int]): Token expiry time in seconds from now - refresh_threshold (Union[None, Unset, int]): Recommended refresh threshold in seconds before expiry + org (AuthResponseOrgType0 | None | Unset): Organization information (personal org created automatically on + registration) + token (None | str | Unset): JWT authentication token (optional for cookie-based auth) + expires_in (int | None | Unset): Token expiry time in seconds from now + refresh_threshold (int | None | Unset): Recommended refresh threshold in seconds before expiry """ - user: "AuthResponseUser" + user: AuthResponseUser message: str - org: Union["AuthResponseOrgType0", None, Unset] = UNSET - token: Union[None, Unset, str] = UNSET - expires_in: Union[None, Unset, int] = UNSET - refresh_threshold: Union[None, Unset, int] = UNSET 
+ org: AuthResponseOrgType0 | None | Unset = UNSET + token: None | str | Unset = UNSET + expires_in: int | None | Unset = UNSET + refresh_threshold: int | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -43,7 +45,7 @@ def to_dict(self) -> dict[str, Any]: message = self.message - org: Union[None, Unset, dict[str, Any]] + org: dict[str, Any] | None | Unset if isinstance(self.org, Unset): org = UNSET elif isinstance(self.org, AuthResponseOrgType0): @@ -51,19 +53,19 @@ def to_dict(self) -> dict[str, Any]: else: org = self.org - token: Union[None, Unset, str] + token: None | str | Unset if isinstance(self.token, Unset): token = UNSET else: token = self.token - expires_in: Union[None, Unset, int] + expires_in: int | None | Unset if isinstance(self.expires_in, Unset): expires_in = UNSET else: expires_in = self.expires_in - refresh_threshold: Union[None, Unset, int] + refresh_threshold: int | None | Unset if isinstance(self.refresh_threshold, Unset): refresh_threshold = UNSET else: @@ -98,7 +100,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: message = d.pop("message") - def _parse_org(data: object) -> Union["AuthResponseOrgType0", None, Unset]: + def _parse_org(data: object) -> AuthResponseOrgType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -109,36 +111,36 @@ def _parse_org(data: object) -> Union["AuthResponseOrgType0", None, Unset]: org_type_0 = AuthResponseOrgType0.from_dict(data) return org_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["AuthResponseOrgType0", None, Unset], data) + return cast(AuthResponseOrgType0 | None | Unset, data) org = _parse_org(d.pop("org", UNSET)) - def _parse_token(data: object) -> Union[None, Unset, str]: + def _parse_token(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - 
return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) token = _parse_token(d.pop("token", UNSET)) - def _parse_expires_in(data: object) -> Union[None, Unset, int]: + def _parse_expires_in(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) expires_in = _parse_expires_in(d.pop("expires_in", UNSET)) - def _parse_refresh_threshold(data: object) -> Union[None, Unset, int]: + def _parse_refresh_threshold(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) refresh_threshold = _parse_refresh_threshold(d.pop("refresh_threshold", UNSET)) diff --git a/robosystems_client/models/auth_response_org_type_0.py b/robosystems_client/models/auth_response_org_type_0.py index 8494bbb..6a49045 100644 --- a/robosystems_client/models/auth_response_org_type_0.py +++ b/robosystems_client/models/auth_response_org_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/auth_response_user.py b/robosystems_client/models/auth_response_user.py index ea397f0..5f59388 100644 --- a/robosystems_client/models/auth_response_user.py +++ b/robosystems_client/models/auth_response_user.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/available_extension.py b/robosystems_client/models/available_extension.py index 5cf2ea2..52a8358 100644 --- a/robosystems_client/models/available_extension.py +++ b/robosystems_client/models/available_extension.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union +from 
typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -15,12 +17,12 @@ class AvailableExtension: Attributes: name (str): description (str): - enabled (Union[Unset, bool]): Default: False. + enabled (bool | Unset): Default: False. """ name: str description: str - enabled: Union[Unset, bool] = False + enabled: bool | Unset = False additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/available_extensions_response.py b/robosystems_client/models/available_extensions_response.py index 288fe02..1a1eff5 100644 --- a/robosystems_client/models/available_extensions_response.py +++ b/robosystems_client/models/available_extensions_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -15,10 +17,10 @@ class AvailableExtensionsResponse: """ Attributes: - extensions (list['AvailableExtension']): + extensions (list[AvailableExtension]): """ - extensions: list["AvailableExtension"] + extensions: list[AvailableExtension] additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/available_graph_tiers_response.py b/robosystems_client/models/available_graph_tiers_response.py index dabc486..cffa41f 100644 --- a/robosystems_client/models/available_graph_tiers_response.py +++ b/robosystems_client/models/available_graph_tiers_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -16,10 +18,10 @@ class AvailableGraphTiersResponse: """Response containing available graph tiers. 
Attributes: - tiers (list['GraphTierInfo']): List of available tiers + tiers (list[GraphTierInfo]): List of available tiers """ - tiers: list["GraphTierInfo"] + tiers: list[GraphTierInfo] additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/backup_create_request.py b/robosystems_client/models/backup_create_request.py index 024974f..f1f07b7 100644 --- a/robosystems_client/models/backup_create_request.py +++ b/robosystems_client/models/backup_create_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,21 +16,21 @@ class BackupCreateRequest: """Request model for creating a backup. Attributes: - backup_format (Union[Unset, str]): Backup format - only 'full_dump' is supported (complete .kuzu database file) + backup_format (str | Unset): Backup format - only 'full_dump' is supported (complete .kuzu database file) Default: 'full_dump'. - backup_type (Union[Unset, str]): Backup type - only 'full' is supported Default: 'full'. - retention_days (Union[Unset, int]): Retention period in days Default: 90. - compression (Union[Unset, bool]): Enable compression (always enabled for optimal storage) Default: True. - encryption (Union[Unset, bool]): Enable encryption (encrypted backups cannot be downloaded) Default: False. - schedule (Union[None, Unset, str]): Optional cron schedule for automated backups + backup_type (str | Unset): Backup type - only 'full' is supported Default: 'full'. + retention_days (int | Unset): Retention period in days Default: 90. + compression (bool | Unset): Enable compression (always enabled for optimal storage) Default: True. + encryption (bool | Unset): Enable encryption (encrypted backups cannot be downloaded) Default: False. 
+ schedule (None | str | Unset): Optional cron schedule for automated backups """ - backup_format: Union[Unset, str] = "full_dump" - backup_type: Union[Unset, str] = "full" - retention_days: Union[Unset, int] = 90 - compression: Union[Unset, bool] = True - encryption: Union[Unset, bool] = False - schedule: Union[None, Unset, str] = UNSET + backup_format: str | Unset = "full_dump" + backup_type: str | Unset = "full" + retention_days: int | Unset = 90 + compression: bool | Unset = True + encryption: bool | Unset = False + schedule: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -42,7 +44,7 @@ def to_dict(self) -> dict[str, Any]: encryption = self.encryption - schedule: Union[None, Unset, str] + schedule: None | str | Unset if isinstance(self.schedule, Unset): schedule = UNSET else: @@ -79,12 +81,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: encryption = d.pop("encryption", UNSET) - def _parse_schedule(data: object) -> Union[None, Unset, str]: + def _parse_schedule(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) schedule = _parse_schedule(d.pop("schedule", UNSET)) diff --git a/robosystems_client/models/backup_download_url_response.py b/robosystems_client/models/backup_download_url_response.py index f9c6e41..1c72c4e 100644 --- a/robosystems_client/models/backup_download_url_response.py +++ b/robosystems_client/models/backup_download_url_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/backup_limits.py b/robosystems_client/models/backup_limits.py index b5cc070..7ccf2ed 100644 --- a/robosystems_client/models/backup_limits.py +++ b/robosystems_client/models/backup_limits.py @@ -1,3 +1,5 @@ 
+from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/backup_list_response.py b/robosystems_client/models/backup_list_response.py index eabd9fa..45a24bb 100644 --- a/robosystems_client/models/backup_list_response.py +++ b/robosystems_client/models/backup_list_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -16,12 +18,12 @@ class BackupListResponse: """Response model for backup list. Attributes: - backups (list['BackupResponse']): + backups (list[BackupResponse]): total_count (int): graph_id (str): """ - backups: list["BackupResponse"] + backups: list[BackupResponse] total_count: int graph_id: str additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/backup_response.py b/robosystems_client/models/backup_response.py index 7b78067..e930db3 100644 --- a/robosystems_client/models/backup_response.py +++ b/robosystems_client/models/backup_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -27,8 +29,8 @@ class BackupResponse: compression_enabled (bool): allow_export (bool): created_at (str): - completed_at (Union[None, str]): - expires_at (Union[None, str]): + completed_at (None | str): + expires_at (None | str): """ backup_id: str @@ -46,8 +48,8 @@ class BackupResponse: compression_enabled: bool allow_export: bool created_at: str - completed_at: Union[None, str] - expires_at: Union[None, str] + completed_at: None | str + expires_at: None | str additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -81,10 +83,10 @@ def to_dict(self) -> 
dict[str, Any]: created_at = self.created_at - completed_at: Union[None, str] + completed_at: None | str completed_at = self.completed_at - expires_at: Union[None, str] + expires_at: None | str expires_at = self.expires_at field_dict: dict[str, Any] = {} @@ -146,17 +148,17 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: created_at = d.pop("created_at") - def _parse_completed_at(data: object) -> Union[None, str]: + def _parse_completed_at(data: object) -> None | str: if data is None: return data - return cast(Union[None, str], data) + return cast(None | str, data) completed_at = _parse_completed_at(d.pop("completed_at")) - def _parse_expires_at(data: object) -> Union[None, str]: + def _parse_expires_at(data: object) -> None | str: if data is None: return data - return cast(Union[None, str], data) + return cast(None | str, data) expires_at = _parse_expires_at(d.pop("expires_at")) diff --git a/robosystems_client/models/backup_restore_request.py b/robosystems_client/models/backup_restore_request.py index ba98c9d..0447166 100644 --- a/robosystems_client/models/backup_restore_request.py +++ b/robosystems_client/models/backup_restore_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,13 +16,12 @@ class BackupRestoreRequest: """Request model for restoring from a backup. Attributes: - create_system_backup (Union[Unset, bool]): Create a system backup of existing database before restore Default: - True. - verify_after_restore (Union[Unset, bool]): Verify database integrity after restore Default: True. + create_system_backup (bool | Unset): Create a system backup of existing database before restore Default: True. + verify_after_restore (bool | Unset): Verify database integrity after restore Default: True. 
""" - create_system_backup: Union[Unset, bool] = True - verify_after_restore: Union[Unset, bool] = True + create_system_backup: bool | Unset = True + verify_after_restore: bool | Unset = True additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/backup_stats_response.py b/robosystems_client/models/backup_stats_response.py index 43153f6..c540c55 100644 --- a/robosystems_client/models/backup_stats_response.py +++ b/robosystems_client/models/backup_stats_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -27,7 +29,7 @@ class BackupStatsResponse: total_compressed_size_bytes (int): storage_saved_bytes (int): average_compression_ratio (float): - latest_backup_date (Union[None, str]): + latest_backup_date (None | str): backup_formats (BackupStatsResponseBackupFormats): """ @@ -40,8 +42,8 @@ class BackupStatsResponse: total_compressed_size_bytes: int storage_saved_bytes: int average_compression_ratio: float - latest_backup_date: Union[None, str] - backup_formats: "BackupStatsResponseBackupFormats" + latest_backup_date: None | str + backup_formats: BackupStatsResponseBackupFormats additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -63,7 +65,7 @@ def to_dict(self) -> dict[str, Any]: average_compression_ratio = self.average_compression_ratio - latest_backup_date: Union[None, str] + latest_backup_date: None | str latest_backup_date = self.latest_backup_date backup_formats = self.backup_formats.to_dict() @@ -113,10 +115,10 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: average_compression_ratio = d.pop("average_compression_ratio") - def 
_parse_latest_backup_date(data: object) -> Union[None, str]: + def _parse_latest_backup_date(data: object) -> None | str: if data is None: return data - return cast(Union[None, str], data) + return cast(None | str, data) latest_backup_date = _parse_latest_backup_date(d.pop("latest_backup_date")) diff --git a/robosystems_client/models/backup_stats_response_backup_formats.py b/robosystems_client/models/backup_stats_response_backup_formats.py index dae069f..f00d6a7 100644 --- a/robosystems_client/models/backup_stats_response_backup_formats.py +++ b/robosystems_client/models/backup_stats_response_backup_formats.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/batch_agent_request.py b/robosystems_client/models/batch_agent_request.py index 616b0c0..d902c5c 100644 --- a/robosystems_client/models/batch_agent_request.py +++ b/robosystems_client/models/batch_agent_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -18,12 +20,12 @@ class BatchAgentRequest: """Request for batch processing multiple queries. Attributes: - queries (list['AgentRequest']): List of queries to process (max 10) - parallel (Union[Unset, bool]): Process queries in parallel Default: False. + queries (list[AgentRequest]): List of queries to process (max 10) + parallel (bool | Unset): Process queries in parallel Default: False. 
""" - queries: list["AgentRequest"] - parallel: Union[Unset, bool] = False + queries: list[AgentRequest] + parallel: bool | Unset = False additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/batch_agent_response.py b/robosystems_client/models/batch_agent_response.py index 3c94e47..b2ed141 100644 --- a/robosystems_client/models/batch_agent_response.py +++ b/robosystems_client/models/batch_agent_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -16,12 +18,12 @@ class BatchAgentResponse: """Response for batch processing. Attributes: - results (list['AgentResponse']): List of agent responses (includes successes and failures) + results (list[AgentResponse]): List of agent responses (includes successes and failures) total_execution_time (float): Total execution time in seconds parallel_processed (bool): Whether queries were processed in parallel """ - results: list["AgentResponse"] + results: list[AgentResponse] total_execution_time: float parallel_processed: bool additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/billing_customer.py b/robosystems_client/models/billing_customer.py index 3c3c559..d5f7578 100644 --- a/robosystems_client/models/billing_customer.py +++ b/robosystems_client/models/billing_customer.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -21,17 +23,17 @@ class BillingCustomer: org_id (str): Organization ID has_payment_method (bool): Whether organization has a payment method on file invoice_billing_enabled (bool): Whether invoice billing is 
enabled (enterprise customers) - payment_methods (list['PaymentMethod']): List of payment methods on file + payment_methods (list[PaymentMethod]): List of payment methods on file created_at (str): Customer creation timestamp (ISO format) - stripe_customer_id (Union[None, Unset, str]): Stripe customer ID if applicable + stripe_customer_id (None | str | Unset): Stripe customer ID if applicable """ org_id: str has_payment_method: bool invoice_billing_enabled: bool - payment_methods: list["PaymentMethod"] + payment_methods: list[PaymentMethod] created_at: str - stripe_customer_id: Union[None, Unset, str] = UNSET + stripe_customer_id: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -48,7 +50,7 @@ def to_dict(self) -> dict[str, Any]: created_at = self.created_at - stripe_customer_id: Union[None, Unset, str] + stripe_customer_id: None | str | Unset if isinstance(self.stripe_customer_id, Unset): stripe_customer_id = UNSET else: @@ -90,12 +92,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: created_at = d.pop("created_at") - def _parse_stripe_customer_id(data: object) -> Union[None, Unset, str]: + def _parse_stripe_customer_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) stripe_customer_id = _parse_stripe_customer_id(d.pop("stripe_customer_id", UNSET)) diff --git a/robosystems_client/models/bulk_ingest_response.py b/robosystems_client/models/bulk_ingest_response.py deleted file mode 100644 index 3f69e74..0000000 --- a/robosystems_client/models/bulk_ingest_response.py +++ /dev/null @@ -1,137 +0,0 @@ -from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -if TYPE_CHECKING: - from 
..models.table_ingest_result import TableIngestResult - - -T = TypeVar("T", bound="BulkIngestResponse") - - -@_attrs_define -class BulkIngestResponse: - """ - Attributes: - status (str): Overall ingestion status - graph_id (str): Graph database identifier - total_tables (int): Total number of tables processed - successful_tables (int): Number of successfully ingested tables - failed_tables (int): Number of failed table ingestions - skipped_tables (int): Number of skipped tables (no files) - total_rows_ingested (int): Total rows ingested across all tables - total_execution_time_ms (float): Total execution time in milliseconds - results (list['TableIngestResult']): Per-table ingestion results - """ - - status: str - graph_id: str - total_tables: int - successful_tables: int - failed_tables: int - skipped_tables: int - total_rows_ingested: int - total_execution_time_ms: float - results: list["TableIngestResult"] - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - status = self.status - - graph_id = self.graph_id - - total_tables = self.total_tables - - successful_tables = self.successful_tables - - failed_tables = self.failed_tables - - skipped_tables = self.skipped_tables - - total_rows_ingested = self.total_rows_ingested - - total_execution_time_ms = self.total_execution_time_ms - - results = [] - for results_item_data in self.results: - results_item = results_item_data.to_dict() - results.append(results_item) - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "status": status, - "graph_id": graph_id, - "total_tables": total_tables, - "successful_tables": successful_tables, - "failed_tables": failed_tables, - "skipped_tables": skipped_tables, - "total_rows_ingested": total_rows_ingested, - "total_execution_time_ms": total_execution_time_ms, - "results": results, - } - ) - - return field_dict - - @classmethod - def from_dict(cls: 
type[T], src_dict: Mapping[str, Any]) -> T: - from ..models.table_ingest_result import TableIngestResult - - d = dict(src_dict) - status = d.pop("status") - - graph_id = d.pop("graph_id") - - total_tables = d.pop("total_tables") - - successful_tables = d.pop("successful_tables") - - failed_tables = d.pop("failed_tables") - - skipped_tables = d.pop("skipped_tables") - - total_rows_ingested = d.pop("total_rows_ingested") - - total_execution_time_ms = d.pop("total_execution_time_ms") - - results = [] - _results = d.pop("results") - for results_item_data in _results: - results_item = TableIngestResult.from_dict(results_item_data) - - results.append(results_item) - - bulk_ingest_response = cls( - status=status, - graph_id=graph_id, - total_tables=total_tables, - successful_tables=successful_tables, - failed_tables=failed_tables, - skipped_tables=skipped_tables, - total_rows_ingested=total_rows_ingested, - total_execution_time_ms=total_execution_time_ms, - results=results, - ) - - bulk_ingest_response.additional_properties = d - return bulk_ingest_response - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/robosystems_client/models/cancel_operation_response_canceloperation.py b/robosystems_client/models/cancel_operation_response_canceloperation.py index b75b626..a28ec38 100644 --- a/robosystems_client/models/cancel_operation_response_canceloperation.py +++ b/robosystems_client/models/cancel_operation_response_canceloperation.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff 
--git a/robosystems_client/models/check_credit_balance_response_checkcreditbalance.py b/robosystems_client/models/check_credit_balance_response_checkcreditbalance.py index b3ab577..d96fe45 100644 --- a/robosystems_client/models/check_credit_balance_response_checkcreditbalance.py +++ b/robosystems_client/models/check_credit_balance_response_checkcreditbalance.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/checkout_response.py b/robosystems_client/models/checkout_response.py index 368d453..e7386f3 100644 --- a/robosystems_client/models/checkout_response.py +++ b/robosystems_client/models/checkout_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,34 +16,34 @@ class CheckoutResponse: """Response from checkout session creation. Attributes: - checkout_url (Union[None, Unset, str]): URL to redirect user to for payment - session_id (Union[None, Unset, str]): Checkout session ID for status polling - subscription_id (Union[None, Unset, str]): Internal subscription ID - requires_checkout (Union[Unset, bool]): Whether checkout is required Default: True. - billing_disabled (Union[Unset, bool]): Whether billing is disabled on this instance Default: False. + checkout_url (None | str | Unset): URL to redirect user to for payment + session_id (None | str | Unset): Checkout session ID for status polling + subscription_id (None | str | Unset): Internal subscription ID + requires_checkout (bool | Unset): Whether checkout is required Default: True. + billing_disabled (bool | Unset): Whether billing is disabled on this instance Default: False. 
""" - checkout_url: Union[None, Unset, str] = UNSET - session_id: Union[None, Unset, str] = UNSET - subscription_id: Union[None, Unset, str] = UNSET - requires_checkout: Union[Unset, bool] = True - billing_disabled: Union[Unset, bool] = False + checkout_url: None | str | Unset = UNSET + session_id: None | str | Unset = UNSET + subscription_id: None | str | Unset = UNSET + requires_checkout: bool | Unset = True + billing_disabled: bool | Unset = False additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: - checkout_url: Union[None, Unset, str] + checkout_url: None | str | Unset if isinstance(self.checkout_url, Unset): checkout_url = UNSET else: checkout_url = self.checkout_url - session_id: Union[None, Unset, str] + session_id: None | str | Unset if isinstance(self.session_id, Unset): session_id = UNSET else: session_id = self.session_id - subscription_id: Union[None, Unset, str] + subscription_id: None | str | Unset if isinstance(self.subscription_id, Unset): subscription_id = UNSET else: @@ -71,30 +73,30 @@ def to_dict(self) -> dict[str, Any]: def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - def _parse_checkout_url(data: object) -> Union[None, Unset, str]: + def _parse_checkout_url(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) checkout_url = _parse_checkout_url(d.pop("checkout_url", UNSET)) - def _parse_session_id(data: object) -> Union[None, Unset, str]: + def _parse_session_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) session_id = _parse_session_id(d.pop("session_id", UNSET)) - def _parse_subscription_id(data: object) -> Union[None, Unset, str]: + def 
_parse_subscription_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) subscription_id = _parse_subscription_id(d.pop("subscription_id", UNSET)) diff --git a/robosystems_client/models/checkout_status_response.py b/robosystems_client/models/checkout_status_response.py index e0be1ca..8037554 100644 --- a/robosystems_client/models/checkout_status_response.py +++ b/robosystems_client/models/checkout_status_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,17 +18,17 @@ class CheckoutStatusResponse: Attributes: status (str): Checkout status: 'pending_payment', 'provisioning', 'completed', 'failed' subscription_id (str): Internal subscription ID - resource_id (Union[None, Unset, str]): Resource ID (graph_id for both graphs and repositories) once provisioned. - For repositories, this is the repository slug (e.g., 'sec') - operation_id (Union[None, Unset, str]): SSE operation ID for monitoring provisioning progress - error (Union[None, Unset, str]): Error message if checkout failed + resource_id (None | str | Unset): Resource ID (graph_id for both graphs and repositories) once provisioned. 
For + repositories, this is the repository slug (e.g., 'sec') + operation_id (None | str | Unset): SSE operation ID for monitoring provisioning progress + error (None | str | Unset): Error message if checkout failed """ status: str subscription_id: str - resource_id: Union[None, Unset, str] = UNSET - operation_id: Union[None, Unset, str] = UNSET - error: Union[None, Unset, str] = UNSET + resource_id: None | str | Unset = UNSET + operation_id: None | str | Unset = UNSET + error: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -34,19 +36,19 @@ def to_dict(self) -> dict[str, Any]: subscription_id = self.subscription_id - resource_id: Union[None, Unset, str] + resource_id: None | str | Unset if isinstance(self.resource_id, Unset): resource_id = UNSET else: resource_id = self.resource_id - operation_id: Union[None, Unset, str] + operation_id: None | str | Unset if isinstance(self.operation_id, Unset): operation_id = UNSET else: operation_id = self.operation_id - error: Union[None, Unset, str] + error: None | str | Unset if isinstance(self.error, Unset): error = UNSET else: @@ -76,30 +78,30 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: subscription_id = d.pop("subscription_id") - def _parse_resource_id(data: object) -> Union[None, Unset, str]: + def _parse_resource_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) resource_id = _parse_resource_id(d.pop("resource_id", UNSET)) - def _parse_operation_id(data: object) -> Union[None, Unset, str]: + def _parse_operation_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) operation_id = _parse_operation_id(d.pop("operation_id", 
UNSET)) - def _parse_error(data: object) -> Union[None, Unset, str]: + def _parse_error(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) error = _parse_error(d.pop("error", UNSET)) diff --git a/robosystems_client/models/connection_options_response.py b/robosystems_client/models/connection_options_response.py index 935dd66..1fa4479 100644 --- a/robosystems_client/models/connection_options_response.py +++ b/robosystems_client/models/connection_options_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -16,11 +18,11 @@ class ConnectionOptionsResponse: """Response with all available connection options. Attributes: - providers (list['ConnectionProviderInfo']): Available connection providers + providers (list[ConnectionProviderInfo]): Available connection providers total_providers (int): Total number of providers """ - providers: list["ConnectionProviderInfo"] + providers: list[ConnectionProviderInfo] total_providers: int additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/connection_provider_info.py b/robosystems_client/models/connection_provider_info.py index 2a6a92d..ae63151 100644 --- a/robosystems_client/models/connection_provider_info.py +++ b/robosystems_client/models/connection_provider_info.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -23,11 +25,11 @@ class ConnectionProviderInfo: required_config (list[str]): Required configuration fields features (list[str]): Supported features data_types (list[str]): Types of data available - auth_flow 
(Union[None, Unset, str]): Description of authentication flow - optional_config (Union[Unset, list[str]]): Optional configuration fields - sync_frequency (Union[None, Unset, str]): Typical sync frequency - setup_instructions (Union[None, Unset, str]): Setup instructions - documentation_url (Union[None, Unset, str]): Link to documentation + auth_flow (None | str | Unset): Description of authentication flow + optional_config (list[str] | Unset): Optional configuration fields + sync_frequency (None | str | Unset): Typical sync frequency + setup_instructions (None | str | Unset): Setup instructions + documentation_url (None | str | Unset): Link to documentation """ provider: ConnectionProviderInfoProvider @@ -37,11 +39,11 @@ class ConnectionProviderInfo: required_config: list[str] features: list[str] data_types: list[str] - auth_flow: Union[None, Unset, str] = UNSET - optional_config: Union[Unset, list[str]] = UNSET - sync_frequency: Union[None, Unset, str] = UNSET - setup_instructions: Union[None, Unset, str] = UNSET - documentation_url: Union[None, Unset, str] = UNSET + auth_flow: None | str | Unset = UNSET + optional_config: list[str] | Unset = UNSET + sync_frequency: None | str | Unset = UNSET + setup_instructions: None | str | Unset = UNSET + documentation_url: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -59,29 +61,29 @@ def to_dict(self) -> dict[str, Any]: data_types = self.data_types - auth_flow: Union[None, Unset, str] + auth_flow: None | str | Unset if isinstance(self.auth_flow, Unset): auth_flow = UNSET else: auth_flow = self.auth_flow - optional_config: Union[Unset, list[str]] = UNSET + optional_config: list[str] | Unset = UNSET if not isinstance(self.optional_config, Unset): optional_config = self.optional_config - sync_frequency: Union[None, Unset, str] + sync_frequency: None | str | Unset if isinstance(self.sync_frequency, Unset): sync_frequency = UNSET 
else: sync_frequency = self.sync_frequency - setup_instructions: Union[None, Unset, str] + setup_instructions: None | str | Unset if isinstance(self.setup_instructions, Unset): setup_instructions = UNSET else: setup_instructions = self.setup_instructions - documentation_url: Union[None, Unset, str] + documentation_url: None | str | Unset if isinstance(self.documentation_url, Unset): documentation_url = UNSET else: @@ -130,41 +132,41 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: data_types = cast(list[str], d.pop("data_types")) - def _parse_auth_flow(data: object) -> Union[None, Unset, str]: + def _parse_auth_flow(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) auth_flow = _parse_auth_flow(d.pop("auth_flow", UNSET)) optional_config = cast(list[str], d.pop("optional_config", UNSET)) - def _parse_sync_frequency(data: object) -> Union[None, Unset, str]: + def _parse_sync_frequency(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) sync_frequency = _parse_sync_frequency(d.pop("sync_frequency", UNSET)) - def _parse_setup_instructions(data: object) -> Union[None, Unset, str]: + def _parse_setup_instructions(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) setup_instructions = _parse_setup_instructions(d.pop("setup_instructions", UNSET)) - def _parse_documentation_url(data: object) -> Union[None, Unset, str]: + def _parse_documentation_url(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) 
documentation_url = _parse_documentation_url(d.pop("documentation_url", UNSET)) diff --git a/robosystems_client/models/connection_response.py b/robosystems_client/models/connection_response.py index b4de35f..2cd37dd 100644 --- a/robosystems_client/models/connection_response.py +++ b/robosystems_client/models/connection_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -25,8 +27,8 @@ class ConnectionResponse: status (str): Connection status created_at (str): Creation timestamp metadata (ConnectionResponseMetadata): Provider-specific metadata - updated_at (Union[None, Unset, str]): Last update timestamp - last_sync (Union[None, Unset, str]): Last sync timestamp + updated_at (None | str | Unset): Last update timestamp + last_sync (None | str | Unset): Last sync timestamp """ connection_id: str @@ -34,9 +36,9 @@ class ConnectionResponse: entity_id: str status: str created_at: str - metadata: "ConnectionResponseMetadata" - updated_at: Union[None, Unset, str] = UNSET - last_sync: Union[None, Unset, str] = UNSET + metadata: ConnectionResponseMetadata + updated_at: None | str | Unset = UNSET + last_sync: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -52,13 +54,13 @@ def to_dict(self) -> dict[str, Any]: metadata = self.metadata.to_dict() - updated_at: Union[None, Unset, str] + updated_at: None | str | Unset if isinstance(self.updated_at, Unset): updated_at = UNSET else: updated_at = self.updated_at - last_sync: Union[None, Unset, str] + last_sync: None | str | Unset if isinstance(self.last_sync, Unset): last_sync = UNSET else: @@ -100,21 +102,21 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: metadata = 
ConnectionResponseMetadata.from_dict(d.pop("metadata")) - def _parse_updated_at(data: object) -> Union[None, Unset, str]: + def _parse_updated_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) updated_at = _parse_updated_at(d.pop("updated_at", UNSET)) - def _parse_last_sync(data: object) -> Union[None, Unset, str]: + def _parse_last_sync(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) last_sync = _parse_last_sync(d.pop("last_sync", UNSET)) diff --git a/robosystems_client/models/connection_response_metadata.py b/robosystems_client/models/connection_response_metadata.py index be2d65a..198a616 100644 --- a/robosystems_client/models/connection_response_metadata.py +++ b/robosystems_client/models/connection_response_metadata.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/copy_operation_limits.py b/robosystems_client/models/copy_operation_limits.py index ff27f21..d90cc5a 100644 --- a/robosystems_client/models/copy_operation_limits.py +++ b/robosystems_client/models/copy_operation_limits.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar, cast diff --git a/robosystems_client/models/create_api_key_request.py b/robosystems_client/models/create_api_key_request.py index a511b1f..831105f 100644 --- a/robosystems_client/models/create_api_key_request.py +++ b/robosystems_client/models/create_api_key_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as 
_attrs_define from attrs import field as _attrs_field @@ -15,25 +17,25 @@ class CreateAPIKeyRequest: Attributes: name (str): Name for the API key - description (Union[None, Unset, str]): Optional description - expires_at (Union[None, Unset, str]): Optional expiration date in ISO format (e.g. 2024-12-31T23:59:59Z) + description (None | str | Unset): Optional description + expires_at (None | str | Unset): Optional expiration date in ISO format (e.g. 2024-12-31T23:59:59Z) """ name: str - description: Union[None, Unset, str] = UNSET - expires_at: Union[None, Unset, str] = UNSET + description: None | str | Unset = UNSET + expires_at: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: name = self.name - description: Union[None, Unset, str] + description: None | str | Unset if isinstance(self.description, Unset): description = UNSET else: description = self.description - expires_at: Union[None, Unset, str] + expires_at: None | str | Unset if isinstance(self.expires_at, Unset): expires_at = UNSET else: @@ -58,21 +60,21 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) name = d.pop("name") - def _parse_description(data: object) -> Union[None, Unset, str]: + def _parse_description(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) description = _parse_description(d.pop("description", UNSET)) - def _parse_expires_at(data: object) -> Union[None, Unset, str]: + def _parse_expires_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) expires_at = _parse_expires_at(d.pop("expires_at", UNSET)) diff --git a/robosystems_client/models/create_api_key_response.py 
b/robosystems_client/models/create_api_key_response.py index bba9ab5..e93611c 100644 --- a/robosystems_client/models/create_api_key_response.py +++ b/robosystems_client/models/create_api_key_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -20,7 +22,7 @@ class CreateAPIKeyResponse: key (str): The actual API key (only shown once) """ - api_key: "APIKeyInfo" + api_key: APIKeyInfo key: str additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/create_checkout_request.py b/robosystems_client/models/create_checkout_request.py index d90b0e2..c2f454a 100644 --- a/robosystems_client/models/create_checkout_request.py +++ b/robosystems_client/models/create_checkout_request.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -26,7 +28,7 @@ class CreateCheckoutRequest: plan_name: str resource_type: str - resource_config: "CreateCheckoutRequestResourceConfig" + resource_config: CreateCheckoutRequestResourceConfig additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/create_checkout_request_resource_config.py b/robosystems_client/models/create_checkout_request_resource_config.py index afe9bbf..f9a4b9c 100644 --- a/robosystems_client/models/create_checkout_request_resource_config.py +++ b/robosystems_client/models/create_checkout_request_resource_config.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/create_connection_request.py b/robosystems_client/models/create_connection_request.py index 19cd895..645b3b3 100644 --- a/robosystems_client/models/create_connection_request.py +++ 
b/robosystems_client/models/create_connection_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -23,16 +25,16 @@ class CreateConnectionRequest: Attributes: provider (CreateConnectionRequestProvider): Connection provider type entity_id (str): Entity identifier - sec_config (Union['SECConnectionConfig', None, Unset]): - quickbooks_config (Union['QuickBooksConnectionConfig', None, Unset]): - plaid_config (Union['PlaidConnectionConfig', None, Unset]): + sec_config (None | SECConnectionConfig | Unset): + quickbooks_config (None | QuickBooksConnectionConfig | Unset): + plaid_config (None | PlaidConnectionConfig | Unset): """ provider: CreateConnectionRequestProvider entity_id: str - sec_config: Union["SECConnectionConfig", None, Unset] = UNSET - quickbooks_config: Union["QuickBooksConnectionConfig", None, Unset] = UNSET - plaid_config: Union["PlaidConnectionConfig", None, Unset] = UNSET + sec_config: None | SECConnectionConfig | Unset = UNSET + quickbooks_config: None | QuickBooksConnectionConfig | Unset = UNSET + plaid_config: None | PlaidConnectionConfig | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -44,7 +46,7 @@ def to_dict(self) -> dict[str, Any]: entity_id = self.entity_id - sec_config: Union[None, Unset, dict[str, Any]] + sec_config: dict[str, Any] | None | Unset if isinstance(self.sec_config, Unset): sec_config = UNSET elif isinstance(self.sec_config, SECConnectionConfig): @@ -52,7 +54,7 @@ def to_dict(self) -> dict[str, Any]: else: sec_config = self.sec_config - quickbooks_config: Union[None, Unset, dict[str, Any]] + quickbooks_config: dict[str, Any] | None | Unset if isinstance(self.quickbooks_config, Unset): quickbooks_config 
= UNSET elif isinstance(self.quickbooks_config, QuickBooksConnectionConfig): @@ -60,7 +62,7 @@ def to_dict(self) -> dict[str, Any]: else: quickbooks_config = self.quickbooks_config - plaid_config: Union[None, Unset, dict[str, Any]] + plaid_config: dict[str, Any] | None | Unset if isinstance(self.plaid_config, Unset): plaid_config = UNSET elif isinstance(self.plaid_config, PlaidConnectionConfig): @@ -96,7 +98,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: entity_id = d.pop("entity_id") - def _parse_sec_config(data: object) -> Union["SECConnectionConfig", None, Unset]: + def _parse_sec_config(data: object) -> None | SECConnectionConfig | Unset: if data is None: return data if isinstance(data, Unset): @@ -107,15 +109,15 @@ def _parse_sec_config(data: object) -> Union["SECConnectionConfig", None, Unset] sec_config_type_0 = SECConnectionConfig.from_dict(data) return sec_config_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["SECConnectionConfig", None, Unset], data) + return cast(None | SECConnectionConfig | Unset, data) sec_config = _parse_sec_config(d.pop("sec_config", UNSET)) def _parse_quickbooks_config( data: object, - ) -> Union["QuickBooksConnectionConfig", None, Unset]: + ) -> None | QuickBooksConnectionConfig | Unset: if data is None: return data if isinstance(data, Unset): @@ -126,15 +128,13 @@ def _parse_quickbooks_config( quickbooks_config_type_0 = QuickBooksConnectionConfig.from_dict(data) return quickbooks_config_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["QuickBooksConnectionConfig", None, Unset], data) + return cast(None | QuickBooksConnectionConfig | Unset, data) quickbooks_config = _parse_quickbooks_config(d.pop("quickbooks_config", UNSET)) - def _parse_plaid_config( - data: object, - ) -> Union["PlaidConnectionConfig", None, Unset]: + def _parse_plaid_config(data: object) -> None | 
PlaidConnectionConfig | Unset: if data is None: return data if isinstance(data, Unset): @@ -145,9 +145,9 @@ def _parse_plaid_config( plaid_config_type_0 = PlaidConnectionConfig.from_dict(data) return plaid_config_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["PlaidConnectionConfig", None, Unset], data) + return cast(None | PlaidConnectionConfig | Unset, data) plaid_config = _parse_plaid_config(d.pop("plaid_config", UNSET)) diff --git a/robosystems_client/models/create_graph_request.py b/robosystems_client/models/create_graph_request.py index 55595cb..df6ef62 100644 --- a/robosystems_client/models/create_graph_request.py +++ b/robosystems_client/models/create_graph_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -25,24 +27,24 @@ class CreateGraphRequest: Attributes: metadata (GraphMetadata): Metadata for graph creation. - instance_tier (Union[Unset, str]): Instance tier: kuzu-standard, kuzu-large, kuzu-xlarge, neo4j-community-large, + instance_tier (str | Unset): Instance tier: kuzu-standard, kuzu-large, kuzu-xlarge, neo4j-community-large, neo4j-enterprise-xlarge Default: 'kuzu-standard'. - custom_schema (Union['CustomSchemaDefinition', None, Unset]): Custom schema definition to apply. If provided, - creates a generic custom graph. If omitted, creates an entity graph using schema_extensions. - initial_entity (Union['InitialEntityData', None, Unset]): Optional initial entity to create in the graph. If - provided with entity graph, populates the first entity node. - create_entity (Union[Unset, bool]): Whether to create the entity node and upload initial data. 
Only applies when + custom_schema (CustomSchemaDefinition | None | Unset): Custom schema definition to apply. If provided, creates a + generic custom graph. If omitted, creates an entity graph using schema_extensions. + initial_entity (InitialEntityData | None | Unset): Optional initial entity to create in the graph. If provided + with entity graph, populates the first entity node. + create_entity (bool | Unset): Whether to create the entity node and upload initial data. Only applies when initial_entity is provided. Set to False to create graph without populating entity data (useful for file-based ingestion workflows). Default: True. - tags (Union[Unset, list[str]]): Optional tags for organization + tags (list[str] | Unset): Optional tags for organization """ - metadata: "GraphMetadata" - instance_tier: Union[Unset, str] = "kuzu-standard" - custom_schema: Union["CustomSchemaDefinition", None, Unset] = UNSET - initial_entity: Union["InitialEntityData", None, Unset] = UNSET - create_entity: Union[Unset, bool] = True - tags: Union[Unset, list[str]] = UNSET + metadata: GraphMetadata + instance_tier: str | Unset = "kuzu-standard" + custom_schema: CustomSchemaDefinition | None | Unset = UNSET + initial_entity: InitialEntityData | None | Unset = UNSET + create_entity: bool | Unset = True + tags: list[str] | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -53,7 +55,7 @@ def to_dict(self) -> dict[str, Any]: instance_tier = self.instance_tier - custom_schema: Union[None, Unset, dict[str, Any]] + custom_schema: dict[str, Any] | None | Unset if isinstance(self.custom_schema, Unset): custom_schema = UNSET elif isinstance(self.custom_schema, CustomSchemaDefinition): @@ -61,7 +63,7 @@ def to_dict(self) -> dict[str, Any]: else: custom_schema = self.custom_schema - initial_entity: Union[None, Unset, dict[str, Any]] + initial_entity: dict[str, Any] | None | Unset if isinstance(self.initial_entity, 
Unset): initial_entity = UNSET elif isinstance(self.initial_entity, InitialEntityData): @@ -71,7 +73,7 @@ def to_dict(self) -> dict[str, Any]: create_entity = self.create_entity - tags: Union[Unset, list[str]] = UNSET + tags: list[str] | Unset = UNSET if not isinstance(self.tags, Unset): tags = self.tags @@ -106,9 +108,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: instance_tier = d.pop("instance_tier", UNSET) - def _parse_custom_schema( - data: object, - ) -> Union["CustomSchemaDefinition", None, Unset]: + def _parse_custom_schema(data: object) -> CustomSchemaDefinition | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -119,13 +119,13 @@ def _parse_custom_schema( custom_schema_type_0 = CustomSchemaDefinition.from_dict(data) return custom_schema_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["CustomSchemaDefinition", None, Unset], data) + return cast(CustomSchemaDefinition | None | Unset, data) custom_schema = _parse_custom_schema(d.pop("custom_schema", UNSET)) - def _parse_initial_entity(data: object) -> Union["InitialEntityData", None, Unset]: + def _parse_initial_entity(data: object) -> InitialEntityData | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -136,9 +136,9 @@ def _parse_initial_entity(data: object) -> Union["InitialEntityData", None, Unse initial_entity_type_0 = InitialEntityData.from_dict(data) return initial_entity_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["InitialEntityData", None, Unset], data) + return cast(InitialEntityData | None | Unset, data) initial_entity = _parse_initial_entity(d.pop("initial_entity", UNSET)) diff --git a/robosystems_client/models/create_org_request.py b/robosystems_client/models/create_org_request.py index 8ddd336..ae41025 100644 --- a/robosystems_client/models/create_org_request.py +++ 
b/robosystems_client/models/create_org_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,17 +18,17 @@ class CreateOrgRequest: Attributes: name (str): - org_type (Union[Unset, OrgType]): + org_type (OrgType | Unset): """ name: str - org_type: Union[Unset, OrgType] = UNSET + org_type: OrgType | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: name = self.name - org_type: Union[Unset, str] = UNSET + org_type: str | Unset = UNSET if not isinstance(self.org_type, Unset): org_type = self.org_type.value @@ -48,7 +50,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: name = d.pop("name") _org_type = d.pop("org_type", UNSET) - org_type: Union[Unset, OrgType] + org_type: OrgType | Unset if isinstance(_org_type, Unset): org_type = UNSET else: diff --git a/robosystems_client/models/create_repository_subscription_request.py b/robosystems_client/models/create_repository_subscription_request.py index 8b7a677..b03d549 100644 --- a/robosystems_client/models/create_repository_subscription_request.py +++ b/robosystems_client/models/create_repository_subscription_request.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/create_subgraph_request.py b/robosystems_client/models/create_subgraph_request.py index b43fd7f..246694e 100644 --- a/robosystems_client/models/create_subgraph_request.py +++ b/robosystems_client/models/create_subgraph_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import 
define as _attrs_define from attrs import field as _attrs_field @@ -23,18 +25,20 @@ class CreateSubgraphRequest: Attributes: name (str): Alphanumeric name for the subgraph (e.g., dev, staging, prod1) display_name (str): Human-readable display name for the subgraph - description (Union[None, Unset, str]): Optional description of the subgraph's purpose - schema_extensions (Union[Unset, list[str]]): Schema extensions to include (inherits from parent by default) - subgraph_type (Union[Unset, SubgraphType]): Types of subgraphs. - metadata (Union['CreateSubgraphRequestMetadataType0', None, Unset]): Additional metadata for the subgraph + description (None | str | Unset): Optional description of the subgraph's purpose + schema_extensions (list[str] | Unset): Schema extensions to include (inherits from parent by default) + subgraph_type (SubgraphType | Unset): Types of subgraphs. + metadata (CreateSubgraphRequestMetadataType0 | None | Unset): Additional metadata for the subgraph + fork_parent (bool | Unset): If true, copy all data from parent graph to create a 'fork' Default: False. 
""" name: str display_name: str - description: Union[None, Unset, str] = UNSET - schema_extensions: Union[Unset, list[str]] = UNSET - subgraph_type: Union[Unset, SubgraphType] = UNSET - metadata: Union["CreateSubgraphRequestMetadataType0", None, Unset] = UNSET + description: None | str | Unset = UNSET + schema_extensions: list[str] | Unset = UNSET + subgraph_type: SubgraphType | Unset = UNSET + metadata: CreateSubgraphRequestMetadataType0 | None | Unset = UNSET + fork_parent: bool | Unset = False additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -46,21 +50,21 @@ def to_dict(self) -> dict[str, Any]: display_name = self.display_name - description: Union[None, Unset, str] + description: None | str | Unset if isinstance(self.description, Unset): description = UNSET else: description = self.description - schema_extensions: Union[Unset, list[str]] = UNSET + schema_extensions: list[str] | Unset = UNSET if not isinstance(self.schema_extensions, Unset): schema_extensions = self.schema_extensions - subgraph_type: Union[Unset, str] = UNSET + subgraph_type: str | Unset = UNSET if not isinstance(self.subgraph_type, Unset): subgraph_type = self.subgraph_type.value - metadata: Union[None, Unset, dict[str, Any]] + metadata: dict[str, Any] | None | Unset if isinstance(self.metadata, Unset): metadata = UNSET elif isinstance(self.metadata, CreateSubgraphRequestMetadataType0): @@ -68,6 +72,8 @@ def to_dict(self) -> dict[str, Any]: else: metadata = self.metadata + fork_parent = self.fork_parent + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -84,6 +90,8 @@ def to_dict(self) -> dict[str, Any]: field_dict["subgraph_type"] = subgraph_type if metadata is not UNSET: field_dict["metadata"] = metadata + if fork_parent is not UNSET: + field_dict["fork_parent"] = fork_parent return field_dict @@ -98,19 +106,19 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> 
T: display_name = d.pop("display_name") - def _parse_description(data: object) -> Union[None, Unset, str]: + def _parse_description(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) description = _parse_description(d.pop("description", UNSET)) schema_extensions = cast(list[str], d.pop("schema_extensions", UNSET)) _subgraph_type = d.pop("subgraph_type", UNSET) - subgraph_type: Union[Unset, SubgraphType] + subgraph_type: SubgraphType | Unset if isinstance(_subgraph_type, Unset): subgraph_type = UNSET else: @@ -118,7 +126,7 @@ def _parse_description(data: object) -> Union[None, Unset, str]: def _parse_metadata( data: object, - ) -> Union["CreateSubgraphRequestMetadataType0", None, Unset]: + ) -> CreateSubgraphRequestMetadataType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -129,12 +137,14 @@ def _parse_metadata( metadata_type_0 = CreateSubgraphRequestMetadataType0.from_dict(data) return metadata_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["CreateSubgraphRequestMetadataType0", None, Unset], data) + return cast(CreateSubgraphRequestMetadataType0 | None | Unset, data) metadata = _parse_metadata(d.pop("metadata", UNSET)) + fork_parent = d.pop("fork_parent", UNSET) + create_subgraph_request = cls( name=name, display_name=display_name, @@ -142,6 +152,7 @@ def _parse_metadata( schema_extensions=schema_extensions, subgraph_type=subgraph_type, metadata=metadata, + fork_parent=fork_parent, ) create_subgraph_request.additional_properties = d diff --git a/robosystems_client/models/create_subgraph_request_metadata_type_0.py b/robosystems_client/models/create_subgraph_request_metadata_type_0.py index 849d392..0872683 100644 --- a/robosystems_client/models/create_subgraph_request_metadata_type_0.py +++ 
b/robosystems_client/models/create_subgraph_request_metadata_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/create_view_request.py b/robosystems_client/models/create_view_request.py new file mode 100644 index 0000000..a1d355c --- /dev/null +++ b/robosystems_client/models/create_view_request.py @@ -0,0 +1,141 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.view_config import ViewConfig + from ..models.view_source import ViewSource + + +T = TypeVar("T", bound="CreateViewRequest") + + +@_attrs_define +class CreateViewRequest: + """ + Attributes: + source (ViewSource): + name (None | str | Unset): Optional name for the view + view_config (ViewConfig | Unset): + presentation_formats (list[str] | Unset): Presentation formats to generate + mapping_structure_id (None | str | Unset): Optional mapping structure ID to aggregate Chart of Accounts elements + into reporting taxonomy elements + """ + + source: ViewSource + name: None | str | Unset = UNSET + view_config: ViewConfig | Unset = UNSET + presentation_formats: list[str] | Unset = UNSET + mapping_structure_id: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + source = self.source.to_dict() + + name: None | str | Unset + if isinstance(self.name, Unset): + name = UNSET + else: + name = self.name + + view_config: dict[str, Any] | Unset = UNSET + if not isinstance(self.view_config, Unset): + view_config = self.view_config.to_dict() + + presentation_formats: list[str] | Unset = UNSET + if not isinstance(self.presentation_formats, Unset): + presentation_formats = 
self.presentation_formats + + mapping_structure_id: None | str | Unset + if isinstance(self.mapping_structure_id, Unset): + mapping_structure_id = UNSET + else: + mapping_structure_id = self.mapping_structure_id + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "source": source, + } + ) + if name is not UNSET: + field_dict["name"] = name + if view_config is not UNSET: + field_dict["view_config"] = view_config + if presentation_formats is not UNSET: + field_dict["presentation_formats"] = presentation_formats + if mapping_structure_id is not UNSET: + field_dict["mapping_structure_id"] = mapping_structure_id + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.view_config import ViewConfig + from ..models.view_source import ViewSource + + d = dict(src_dict) + source = ViewSource.from_dict(d.pop("source")) + + def _parse_name(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + name = _parse_name(d.pop("name", UNSET)) + + _view_config = d.pop("view_config", UNSET) + view_config: ViewConfig | Unset + if isinstance(_view_config, Unset): + view_config = UNSET + else: + view_config = ViewConfig.from_dict(_view_config) + + presentation_formats = cast(list[str], d.pop("presentation_formats", UNSET)) + + def _parse_mapping_structure_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + mapping_structure_id = _parse_mapping_structure_id( + d.pop("mapping_structure_id", UNSET) + ) + + create_view_request = cls( + source=source, + name=name, + view_config=view_config, + presentation_formats=presentation_formats, + mapping_structure_id=mapping_structure_id, + ) + + create_view_request.additional_properties = d + return create_view_request + + 
@property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/credit_limits.py b/robosystems_client/models/credit_limits.py index ab7bc18..8521a37 100644 --- a/robosystems_client/models/credit_limits.py +++ b/robosystems_client/models/credit_limits.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/credit_summary.py b/robosystems_client/models/credit_summary.py index 8c244f3..238b458 100644 --- a/robosystems_client/models/credit_summary.py +++ b/robosystems_client/models/credit_summary.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -30,7 +32,7 @@ class CreditSummary: graph_tier: str total_credits_consumed: float total_base_cost: float - operation_breakdown: "CreditSummaryOperationBreakdown" + operation_breakdown: CreditSummaryOperationBreakdown cached_operations: int billable_operations: int transaction_count: int diff --git a/robosystems_client/models/credit_summary_operation_breakdown.py b/robosystems_client/models/credit_summary_operation_breakdown.py index 7268325..9b572e0 100644 --- a/robosystems_client/models/credit_summary_operation_breakdown.py +++ b/robosystems_client/models/credit_summary_operation_breakdown.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/credit_summary_response.py 
b/robosystems_client/models/credit_summary_response.py index 567b820..550141e 100644 --- a/robosystems_client/models/credit_summary_response.py +++ b/robosystems_client/models/credit_summary_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -21,7 +23,7 @@ class CreditSummaryResponse: consumed_this_month (float): transaction_count (int): usage_percentage (float): - last_allocation_date (Union[None, Unset, str]): + last_allocation_date (None | str | Unset): """ graph_id: str @@ -31,7 +33,7 @@ class CreditSummaryResponse: consumed_this_month: float transaction_count: int usage_percentage: float - last_allocation_date: Union[None, Unset, str] = UNSET + last_allocation_date: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -49,7 +51,7 @@ def to_dict(self) -> dict[str, Any]: usage_percentage = self.usage_percentage - last_allocation_date: Union[None, Unset, str] + last_allocation_date: None | str | Unset if isinstance(self.last_allocation_date, Unset): last_allocation_date = UNSET else: @@ -90,12 +92,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: usage_percentage = d.pop("usage_percentage") - def _parse_last_allocation_date(data: object) -> Union[None, Unset, str]: + def _parse_last_allocation_date(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) last_allocation_date = _parse_last_allocation_date( d.pop("last_allocation_date", UNSET) diff --git a/robosystems_client/models/custom_schema_definition.py b/robosystems_client/models/custom_schema_definition.py index 0d3fb07..4144b9e 100644 --- 
a/robosystems_client/models/custom_schema_definition.py +++ b/robosystems_client/models/custom_schema_definition.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -29,21 +31,21 @@ class CustomSchemaDefinition: Attributes: name (str): Schema name - version (Union[Unset, str]): Schema version Default: '1.0.0'. - description (Union[None, Unset, str]): Schema description - extends (Union[None, Unset, str]): Base schema to extend (e.g., 'base' for common utilities) - nodes (Union[Unset, list['CustomSchemaDefinitionNodesItem']]): List of node definitions with properties - relationships (Union[Unset, list['CustomSchemaDefinitionRelationshipsItem']]): List of relationship definitions - metadata (Union[Unset, CustomSchemaDefinitionMetadata]): Additional schema metadata + version (str | Unset): Schema version Default: '1.0.0'. 
+ description (None | str | Unset): Schema description + extends (None | str | Unset): Base schema to extend (e.g., 'base' for common utilities) + nodes (list[CustomSchemaDefinitionNodesItem] | Unset): List of node definitions with properties + relationships (list[CustomSchemaDefinitionRelationshipsItem] | Unset): List of relationship definitions + metadata (CustomSchemaDefinitionMetadata | Unset): Additional schema metadata """ name: str - version: Union[Unset, str] = "1.0.0" - description: Union[None, Unset, str] = UNSET - extends: Union[None, Unset, str] = UNSET - nodes: Union[Unset, list["CustomSchemaDefinitionNodesItem"]] = UNSET - relationships: Union[Unset, list["CustomSchemaDefinitionRelationshipsItem"]] = UNSET - metadata: Union[Unset, "CustomSchemaDefinitionMetadata"] = UNSET + version: str | Unset = "1.0.0" + description: None | str | Unset = UNSET + extends: None | str | Unset = UNSET + nodes: list[CustomSchemaDefinitionNodesItem] | Unset = UNSET + relationships: list[CustomSchemaDefinitionRelationshipsItem] | Unset = UNSET + metadata: CustomSchemaDefinitionMetadata | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -51,33 +53,33 @@ def to_dict(self) -> dict[str, Any]: version = self.version - description: Union[None, Unset, str] + description: None | str | Unset if isinstance(self.description, Unset): description = UNSET else: description = self.description - extends: Union[None, Unset, str] + extends: None | str | Unset if isinstance(self.extends, Unset): extends = UNSET else: extends = self.extends - nodes: Union[Unset, list[dict[str, Any]]] = UNSET + nodes: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.nodes, Unset): nodes = [] for nodes_item_data in self.nodes: nodes_item = nodes_item_data.to_dict() nodes.append(nodes_item) - relationships: Union[Unset, list[dict[str, Any]]] = UNSET + relationships: list[dict[str, Any]] | Unset = UNSET if not 
isinstance(self.relationships, Unset): relationships = [] for relationships_item_data in self.relationships: relationships_item = relationships_item_data.to_dict() relationships.append(relationships_item) - metadata: Union[Unset, dict[str, Any]] = UNSET + metadata: dict[str, Any] | Unset = UNSET if not isinstance(self.metadata, Unset): metadata = self.metadata.to_dict() @@ -120,42 +122,46 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: version = d.pop("version", UNSET) - def _parse_description(data: object) -> Union[None, Unset, str]: + def _parse_description(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) description = _parse_description(d.pop("description", UNSET)) - def _parse_extends(data: object) -> Union[None, Unset, str]: + def _parse_extends(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) extends = _parse_extends(d.pop("extends", UNSET)) - nodes = [] _nodes = d.pop("nodes", UNSET) - for nodes_item_data in _nodes or []: - nodes_item = CustomSchemaDefinitionNodesItem.from_dict(nodes_item_data) + nodes: list[CustomSchemaDefinitionNodesItem] | Unset = UNSET + if _nodes is not UNSET: + nodes = [] + for nodes_item_data in _nodes: + nodes_item = CustomSchemaDefinitionNodesItem.from_dict(nodes_item_data) - nodes.append(nodes_item) + nodes.append(nodes_item) - relationships = [] _relationships = d.pop("relationships", UNSET) - for relationships_item_data in _relationships or []: - relationships_item = CustomSchemaDefinitionRelationshipsItem.from_dict( - relationships_item_data - ) + relationships: list[CustomSchemaDefinitionRelationshipsItem] | Unset = UNSET + if _relationships is not UNSET: + relationships = [] + for relationships_item_data in _relationships: + 
relationships_item = CustomSchemaDefinitionRelationshipsItem.from_dict( + relationships_item_data + ) - relationships.append(relationships_item) + relationships.append(relationships_item) _metadata = d.pop("metadata", UNSET) - metadata: Union[Unset, CustomSchemaDefinitionMetadata] + metadata: CustomSchemaDefinitionMetadata | Unset if isinstance(_metadata, Unset): metadata = UNSET else: diff --git a/robosystems_client/models/custom_schema_definition_metadata.py b/robosystems_client/models/custom_schema_definition_metadata.py index 6881868..7517bc1 100644 --- a/robosystems_client/models/custom_schema_definition_metadata.py +++ b/robosystems_client/models/custom_schema_definition_metadata.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/custom_schema_definition_nodes_item.py b/robosystems_client/models/custom_schema_definition_nodes_item.py index aa36cbd..eb57e65 100644 --- a/robosystems_client/models/custom_schema_definition_nodes_item.py +++ b/robosystems_client/models/custom_schema_definition_nodes_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/custom_schema_definition_relationships_item.py b/robosystems_client/models/custom_schema_definition_relationships_item.py index 5bdd84b..0284d28 100644 --- a/robosystems_client/models/custom_schema_definition_relationships_item.py +++ b/robosystems_client/models/custom_schema_definition_relationships_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/cypher_query_request.py b/robosystems_client/models/cypher_query_request.py index 982ca05..ee8a739 100644 --- a/robosystems_client/models/cypher_query_request.py +++ b/robosystems_client/models/cypher_query_request.py @@ -1,5 +1,7 
@@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define @@ -21,14 +23,14 @@ class CypherQueryRequest: Attributes: query (str): The Cypher query to execute. Use parameters ($param_name) for all dynamic values to prevent injection attacks. - parameters (Union['CypherQueryRequestParametersType0', None, Unset]): Query parameters for safe value - substitution. ALWAYS use parameters instead of string interpolation. - timeout (Union[None, Unset, int]): Query timeout in seconds (1-300) Default: 60. + parameters (CypherQueryRequestParametersType0 | None | Unset): Query parameters for safe value substitution. + ALWAYS use parameters instead of string interpolation. + timeout (int | None | Unset): Query timeout in seconds (1-300) Default: 60. """ query: str - parameters: Union["CypherQueryRequestParametersType0", None, Unset] = UNSET - timeout: Union[None, Unset, int] = 60 + parameters: CypherQueryRequestParametersType0 | None | Unset = UNSET + timeout: int | None | Unset = 60 def to_dict(self) -> dict[str, Any]: from ..models.cypher_query_request_parameters_type_0 import ( @@ -37,7 +39,7 @@ def to_dict(self) -> dict[str, Any]: query = self.query - parameters: Union[None, Unset, dict[str, Any]] + parameters: dict[str, Any] | None | Unset if isinstance(self.parameters, Unset): parameters = UNSET elif isinstance(self.parameters, CypherQueryRequestParametersType0): @@ -45,7 +47,7 @@ def to_dict(self) -> dict[str, Any]: else: parameters = self.parameters - timeout: Union[None, Unset, int] + timeout: int | None | Unset if isinstance(self.timeout, Unset): timeout = UNSET else: @@ -76,7 +78,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: def _parse_parameters( data: object, - ) -> Union["CypherQueryRequestParametersType0", None, Unset]: + ) -> CypherQueryRequestParametersType0 | None | 
Unset: if data is None: return data if isinstance(data, Unset): @@ -87,18 +89,18 @@ def _parse_parameters( parameters_type_0 = CypherQueryRequestParametersType0.from_dict(data) return parameters_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["CypherQueryRequestParametersType0", None, Unset], data) + return cast(CypherQueryRequestParametersType0 | None | Unset, data) parameters = _parse_parameters(d.pop("parameters", UNSET)) - def _parse_timeout(data: object) -> Union[None, Unset, int]: + def _parse_timeout(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) timeout = _parse_timeout(d.pop("timeout", UNSET)) diff --git a/robosystems_client/models/cypher_query_request_parameters_type_0.py b/robosystems_client/models/cypher_query_request_parameters_type_0.py index 87aff20..fb5488e 100644 --- a/robosystems_client/models/cypher_query_request_parameters_type_0.py +++ b/robosystems_client/models/cypher_query_request_parameters_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/database_health_response.py b/robosystems_client/models/database_health_response.py index 9a0296a..28ae785 100644 --- a/robosystems_client/models/database_health_response.py +++ b/robosystems_client/models/database_health_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -21,10 +23,10 @@ class DatabaseHealthResponse: query_count_24h (int): Number of queries executed in last 24 hours avg_query_time_ms (float): Average query execution time in milliseconds 
error_rate_24h (float): Error rate in last 24 hours (percentage) - last_query_time (Union[None, Unset, str]): Timestamp of last query execution - memory_usage_mb (Union[None, Unset, float]): Memory usage in MB - storage_usage_mb (Union[None, Unset, float]): Storage usage in MB - alerts (Union[Unset, list[str]]): Active alerts or warnings + last_query_time (None | str | Unset): Timestamp of last query execution + memory_usage_mb (float | None | Unset): Memory usage in MB + storage_usage_mb (float | None | Unset): Storage usage in MB + alerts (list[str] | Unset): Active alerts or warnings """ graph_id: str @@ -34,10 +36,10 @@ class DatabaseHealthResponse: query_count_24h: int avg_query_time_ms: float error_rate_24h: float - last_query_time: Union[None, Unset, str] = UNSET - memory_usage_mb: Union[None, Unset, float] = UNSET - storage_usage_mb: Union[None, Unset, float] = UNSET - alerts: Union[Unset, list[str]] = UNSET + last_query_time: None | str | Unset = UNSET + memory_usage_mb: float | None | Unset = UNSET + storage_usage_mb: float | None | Unset = UNSET + alerts: list[str] | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -55,25 +57,25 @@ def to_dict(self) -> dict[str, Any]: error_rate_24h = self.error_rate_24h - last_query_time: Union[None, Unset, str] + last_query_time: None | str | Unset if isinstance(self.last_query_time, Unset): last_query_time = UNSET else: last_query_time = self.last_query_time - memory_usage_mb: Union[None, Unset, float] + memory_usage_mb: float | None | Unset if isinstance(self.memory_usage_mb, Unset): memory_usage_mb = UNSET else: memory_usage_mb = self.memory_usage_mb - storage_usage_mb: Union[None, Unset, float] + storage_usage_mb: float | None | Unset if isinstance(self.storage_usage_mb, Unset): storage_usage_mb = UNSET else: storage_usage_mb = self.storage_usage_mb - alerts: Union[Unset, list[str]] = UNSET + alerts: list[str] | Unset = UNSET if not 
isinstance(self.alerts, Unset): alerts = self.alerts @@ -118,30 +120,30 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: error_rate_24h = d.pop("error_rate_24h") - def _parse_last_query_time(data: object) -> Union[None, Unset, str]: + def _parse_last_query_time(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) last_query_time = _parse_last_query_time(d.pop("last_query_time", UNSET)) - def _parse_memory_usage_mb(data: object) -> Union[None, Unset, float]: + def _parse_memory_usage_mb(data: object) -> float | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) memory_usage_mb = _parse_memory_usage_mb(d.pop("memory_usage_mb", UNSET)) - def _parse_storage_usage_mb(data: object) -> Union[None, Unset, float]: + def _parse_storage_usage_mb(data: object) -> float | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) storage_usage_mb = _parse_storage_usage_mb(d.pop("storage_usage_mb", UNSET)) diff --git a/robosystems_client/models/database_info_response.py b/robosystems_client/models/database_info_response.py index 74ed8dd..38e1495 100644 --- a/robosystems_client/models/database_info_response.py +++ b/robosystems_client/models/database_info_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -26,8 +28,8 @@ class DatabaseInfoResponse: last_modified (str): Last modification timestamp read_only (bool): Whether database is read-only backup_count (int): Number of available 
backups - schema_version (Union[None, Unset, str]): Schema version - last_backup_date (Union[None, Unset, str]): Date of last backup + schema_version (None | str | Unset): Schema version + last_backup_date (None | str | Unset): Date of last backup """ graph_id: str @@ -42,8 +44,8 @@ class DatabaseInfoResponse: last_modified: str read_only: bool backup_count: int - schema_version: Union[None, Unset, str] = UNSET - last_backup_date: Union[None, Unset, str] = UNSET + schema_version: None | str | Unset = UNSET + last_backup_date: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -71,13 +73,13 @@ def to_dict(self) -> dict[str, Any]: backup_count = self.backup_count - schema_version: Union[None, Unset, str] + schema_version: None | str | Unset if isinstance(self.schema_version, Unset): schema_version = UNSET else: schema_version = self.schema_version - last_backup_date: Union[None, Unset, str] + last_backup_date: None | str | Unset if isinstance(self.last_backup_date, Unset): last_backup_date = UNSET else: @@ -135,21 +137,21 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: backup_count = d.pop("backup_count") - def _parse_schema_version(data: object) -> Union[None, Unset, str]: + def _parse_schema_version(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) schema_version = _parse_schema_version(d.pop("schema_version", UNSET)) - def _parse_last_backup_date(data: object) -> Union[None, Unset, str]: + def _parse_last_backup_date(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) last_backup_date = _parse_last_backup_date(d.pop("last_backup_date", UNSET)) diff --git 
a/robosystems_client/models/delete_file_response.py b/robosystems_client/models/delete_file_response.py index e43e4a8..85574ba 100644 --- a/robosystems_client/models/delete_file_response.py +++ b/robosystems_client/models/delete_file_response.py @@ -1,9 +1,13 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field +from ..types import UNSET, Unset + T = TypeVar("T", bound="DeleteFileResponse") @@ -15,12 +19,18 @@ class DeleteFileResponse: file_id (str): Deleted file ID file_name (str): Deleted file name message (str): Operation message + cascade_deleted (bool | Unset): Whether cascade deletion was performed Default: False. + tables_affected (list[str] | None | Unset): Tables from which file data was deleted (if cascade=true) + graph_marked_stale (bool | Unset): Whether graph was marked as stale Default: False. """ status: str file_id: str file_name: str message: str + cascade_deleted: bool | Unset = False + tables_affected: list[str] | None | Unset = UNSET + graph_marked_stale: bool | Unset = False additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -32,6 +42,19 @@ def to_dict(self) -> dict[str, Any]: message = self.message + cascade_deleted = self.cascade_deleted + + tables_affected: list[str] | None | Unset + if isinstance(self.tables_affected, Unset): + tables_affected = UNSET + elif isinstance(self.tables_affected, list): + tables_affected = self.tables_affected + + else: + tables_affected = self.tables_affected + + graph_marked_stale = self.graph_marked_stale + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -42,6 +65,12 @@ def to_dict(self) -> dict[str, Any]: "message": message, } ) + if cascade_deleted is not UNSET: + field_dict["cascade_deleted"] = cascade_deleted + if 
tables_affected is not UNSET: + field_dict["tables_affected"] = tables_affected + if graph_marked_stale is not UNSET: + field_dict["graph_marked_stale"] = graph_marked_stale return field_dict @@ -56,11 +85,35 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: message = d.pop("message") + cascade_deleted = d.pop("cascade_deleted", UNSET) + + def _parse_tables_affected(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + tables_affected_type_0 = cast(list[str], data) + + return tables_affected_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) + + tables_affected = _parse_tables_affected(d.pop("tables_affected", UNSET)) + + graph_marked_stale = d.pop("graph_marked_stale", UNSET) + delete_file_response = cls( status=status, file_id=file_id, file_name=file_name, message=message, + cascade_deleted=cascade_deleted, + tables_affected=tables_affected, + graph_marked_stale=graph_marked_stale, ) delete_file_response.additional_properties = d diff --git a/robosystems_client/models/delete_subgraph_request.py b/robosystems_client/models/delete_subgraph_request.py index 9650854..1fb0263 100644 --- a/robosystems_client/models/delete_subgraph_request.py +++ b/robosystems_client/models/delete_subgraph_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,14 +16,14 @@ class DeleteSubgraphRequest: """Request model for deleting a subgraph. Attributes: - force (Union[Unset, bool]): Force deletion even if subgraph contains data Default: False. - backup_first (Union[Unset, bool]): Create a backup before deletion Default: True. 
- backup_location (Union[None, Unset, str]): S3 location for backup (uses default if not specified) + force (bool | Unset): Force deletion even if subgraph contains data Default: False. + backup_first (bool | Unset): Create a backup before deletion Default: True. + backup_location (None | str | Unset): S3 location for backup (uses default if not specified) """ - force: Union[Unset, bool] = False - backup_first: Union[Unset, bool] = True - backup_location: Union[None, Unset, str] = UNSET + force: bool | Unset = False + backup_first: bool | Unset = True + backup_location: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -29,7 +31,7 @@ def to_dict(self) -> dict[str, Any]: backup_first = self.backup_first - backup_location: Union[None, Unset, str] + backup_location: None | str | Unset if isinstance(self.backup_location, Unset): backup_location = UNSET else: @@ -54,12 +56,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: backup_first = d.pop("backup_first", UNSET) - def _parse_backup_location(data: object) -> Union[None, Unset, str]: + def _parse_backup_location(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) backup_location = _parse_backup_location(d.pop("backup_location", UNSET)) diff --git a/robosystems_client/models/delete_subgraph_response.py b/robosystems_client/models/delete_subgraph_response.py index 8f1d9d9..649cc51 100644 --- a/robosystems_client/models/delete_subgraph_response.py +++ b/robosystems_client/models/delete_subgraph_response.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as 
_attrs_field @@ -19,15 +21,15 @@ class DeleteSubgraphResponse: graph_id (str): Deleted subgraph identifier status (str): Deletion status deleted_at (datetime.datetime): When deletion occurred - backup_location (Union[None, Unset, str]): Location of backup if created - message (Union[None, Unset, str]): Additional information about the deletion + backup_location (None | str | Unset): Location of backup if created + message (None | str | Unset): Additional information about the deletion """ graph_id: str status: str deleted_at: datetime.datetime - backup_location: Union[None, Unset, str] = UNSET - message: Union[None, Unset, str] = UNSET + backup_location: None | str | Unset = UNSET + message: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -37,13 +39,13 @@ def to_dict(self) -> dict[str, Any]: deleted_at = self.deleted_at.isoformat() - backup_location: Union[None, Unset, str] + backup_location: None | str | Unset if isinstance(self.backup_location, Unset): backup_location = UNSET else: backup_location = self.backup_location - message: Union[None, Unset, str] + message: None | str | Unset if isinstance(self.message, Unset): message = UNSET else: @@ -74,21 +76,21 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: deleted_at = isoparse(d.pop("deleted_at")) - def _parse_backup_location(data: object) -> Union[None, Unset, str]: + def _parse_backup_location(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) backup_location = _parse_backup_location(d.pop("backup_location", UNSET)) - def _parse_message(data: object) -> Union[None, Unset, str]: + def _parse_message(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return 
cast(None | str | Unset, data) message = _parse_message(d.pop("message", UNSET)) diff --git a/robosystems_client/models/detailed_transactions_response.py b/robosystems_client/models/detailed_transactions_response.py index 2d6b1a0..b30563c 100644 --- a/robosystems_client/models/detailed_transactions_response.py +++ b/robosystems_client/models/detailed_transactions_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -24,18 +26,18 @@ class DetailedTransactionsResponse: """Detailed response for transaction queries. Attributes: - transactions (list['EnhancedCreditTransactionResponse']): + transactions (list[EnhancedCreditTransactionResponse]): summary (DetailedTransactionsResponseSummary): total_count (int): filtered_count (int): date_range (DetailedTransactionsResponseDateRange): """ - transactions: list["EnhancedCreditTransactionResponse"] - summary: "DetailedTransactionsResponseSummary" + transactions: list[EnhancedCreditTransactionResponse] + summary: DetailedTransactionsResponseSummary total_count: int filtered_count: int - date_range: "DetailedTransactionsResponseDateRange" + date_range: DetailedTransactionsResponseDateRange additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/detailed_transactions_response_date_range.py b/robosystems_client/models/detailed_transactions_response_date_range.py index e874da1..65f2ce8 100644 --- a/robosystems_client/models/detailed_transactions_response_date_range.py +++ b/robosystems_client/models/detailed_transactions_response_date_range.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/detailed_transactions_response_summary.py b/robosystems_client/models/detailed_transactions_response_summary.py index 5060bbd..8b2d609 
100644 --- a/robosystems_client/models/detailed_transactions_response_summary.py +++ b/robosystems_client/models/detailed_transactions_response_summary.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -15,7 +17,7 @@ class DetailedTransactionsResponseSummary: """ """ - additional_properties: dict[str, "TransactionSummaryResponse"] = _attrs_field( + additional_properties: dict[str, TransactionSummaryResponse] = _attrs_field( init=False, factory=dict ) @@ -46,10 +48,10 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> "TransactionSummaryResponse": + def __getitem__(self, key: str) -> TransactionSummaryResponse: return self.additional_properties[key] - def __setitem__(self, key: str, value: "TransactionSummaryResponse") -> None: + def __setitem__(self, key: str, value: TransactionSummaryResponse) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/robosystems_client/models/email_verification_request.py b/robosystems_client/models/email_verification_request.py index 56a8d7d..6496c24 100644 --- a/robosystems_client/models/email_verification_request.py +++ b/robosystems_client/models/email_verification_request.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/enhanced_credit_transaction_response.py b/robosystems_client/models/enhanced_credit_transaction_response.py index 945d13a..61f2995 100644 --- a/robosystems_client/models/enhanced_credit_transaction_response.py +++ b/robosystems_client/models/enhanced_credit_transaction_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, 
cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -26,22 +28,22 @@ class EnhancedCreditTransactionResponse: description (str): metadata (EnhancedCreditTransactionResponseMetadata): created_at (str): - operation_id (Union[None, Unset, str]): - idempotency_key (Union[None, Unset, str]): - request_id (Union[None, Unset, str]): - user_id (Union[None, Unset, str]): + operation_id (None | str | Unset): + idempotency_key (None | str | Unset): + request_id (None | str | Unset): + user_id (None | str | Unset): """ id: str type_: str amount: float description: str - metadata: "EnhancedCreditTransactionResponseMetadata" + metadata: EnhancedCreditTransactionResponseMetadata created_at: str - operation_id: Union[None, Unset, str] = UNSET - idempotency_key: Union[None, Unset, str] = UNSET - request_id: Union[None, Unset, str] = UNSET - user_id: Union[None, Unset, str] = UNSET + operation_id: None | str | Unset = UNSET + idempotency_key: None | str | Unset = UNSET + request_id: None | str | Unset = UNSET + user_id: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -57,25 +59,25 @@ def to_dict(self) -> dict[str, Any]: created_at = self.created_at - operation_id: Union[None, Unset, str] + operation_id: None | str | Unset if isinstance(self.operation_id, Unset): operation_id = UNSET else: operation_id = self.operation_id - idempotency_key: Union[None, Unset, str] + idempotency_key: None | str | Unset if isinstance(self.idempotency_key, Unset): idempotency_key = UNSET else: idempotency_key = self.idempotency_key - request_id: Union[None, Unset, str] + request_id: None | str | Unset if isinstance(self.request_id, Unset): request_id = UNSET else: request_id = self.request_id - user_id: Union[None, Unset, str] + user_id: None | str | Unset if isinstance(self.user_id, Unset): user_id = UNSET else: @@ 
-123,39 +125,39 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: created_at = d.pop("created_at") - def _parse_operation_id(data: object) -> Union[None, Unset, str]: + def _parse_operation_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) operation_id = _parse_operation_id(d.pop("operation_id", UNSET)) - def _parse_idempotency_key(data: object) -> Union[None, Unset, str]: + def _parse_idempotency_key(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) idempotency_key = _parse_idempotency_key(d.pop("idempotency_key", UNSET)) - def _parse_request_id(data: object) -> Union[None, Unset, str]: + def _parse_request_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) request_id = _parse_request_id(d.pop("request_id", UNSET)) - def _parse_user_id(data: object) -> Union[None, Unset, str]: + def _parse_user_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) user_id = _parse_user_id(d.pop("user_id", UNSET)) diff --git a/robosystems_client/models/enhanced_credit_transaction_response_metadata.py b/robosystems_client/models/enhanced_credit_transaction_response_metadata.py index e68ea9a..01e0528 100644 --- a/robosystems_client/models/enhanced_credit_transaction_response_metadata.py +++ b/robosystems_client/models/enhanced_credit_transaction_response_metadata.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git 
a/robosystems_client/models/enhanced_file_status_layers.py b/robosystems_client/models/enhanced_file_status_layers.py new file mode 100644 index 0000000..fe594f3 --- /dev/null +++ b/robosystems_client/models/enhanced_file_status_layers.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.file_layer_status import FileLayerStatus + + +T = TypeVar("T", bound="EnhancedFileStatusLayers") + + +@_attrs_define +class EnhancedFileStatusLayers: + """ + Attributes: + s3 (FileLayerStatus): + duckdb (FileLayerStatus): + graph (FileLayerStatus): + """ + + s3: FileLayerStatus + duckdb: FileLayerStatus + graph: FileLayerStatus + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + s3 = self.s3.to_dict() + + duckdb = self.duckdb.to_dict() + + graph = self.graph.to_dict() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "s3": s3, + "duckdb": duckdb, + "graph": graph, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.file_layer_status import FileLayerStatus + + d = dict(src_dict) + s3 = FileLayerStatus.from_dict(d.pop("s3")) + + duckdb = FileLayerStatus.from_dict(d.pop("duckdb")) + + graph = FileLayerStatus.from_dict(d.pop("graph")) + + enhanced_file_status_layers = cls( + s3=s3, + duckdb=duckdb, + graph=graph, + ) + + enhanced_file_status_layers.additional_properties = d + return enhanced_file_status_layers + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + 
self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/error_response.py b/robosystems_client/models/error_response.py index 15e971e..4c3c396 100644 --- a/robosystems_client/models/error_response.py +++ b/robosystems_client/models/error_response.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -24,33 +26,33 @@ class ErrorResponse: Attributes: detail (str): Human-readable error message explaining what went wrong - code (Union[None, Unset, str]): Machine-readable error code for programmatic handling - request_id (Union[None, Unset, str]): Unique request ID for tracking and debugging - timestamp (Union[None, Unset, datetime.datetime]): Timestamp when the error occurred + code (None | str | Unset): Machine-readable error code for programmatic handling + request_id (None | str | Unset): Unique request ID for tracking and debugging + timestamp (datetime.datetime | None | Unset): Timestamp when the error occurred """ detail: str - code: Union[None, Unset, str] = UNSET - request_id: Union[None, Unset, str] = UNSET - timestamp: Union[None, Unset, datetime.datetime] = UNSET + code: None | str | Unset = UNSET + request_id: None | str | Unset = UNSET + timestamp: datetime.datetime | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: detail = self.detail - code: Union[None, Unset, str] + code: None | str | Unset if isinstance(self.code, Unset): code = UNSET else: code = self.code - request_id: Union[None, Unset, str] + request_id: None | str | Unset if 
isinstance(self.request_id, Unset): request_id = UNSET else: request_id = self.request_id - timestamp: Union[None, Unset, str] + timestamp: None | str | Unset if isinstance(self.timestamp, Unset): timestamp = UNSET elif isinstance(self.timestamp, datetime.datetime): @@ -79,25 +81,25 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) detail = d.pop("detail") - def _parse_code(data: object) -> Union[None, Unset, str]: + def _parse_code(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) code = _parse_code(d.pop("code", UNSET)) - def _parse_request_id(data: object) -> Union[None, Unset, str]: + def _parse_request_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) request_id = _parse_request_id(d.pop("request_id", UNSET)) - def _parse_timestamp(data: object) -> Union[None, Unset, datetime.datetime]: + def _parse_timestamp(data: object) -> datetime.datetime | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -108,9 +110,9 @@ def _parse_timestamp(data: object) -> Union[None, Unset, datetime.datetime]: timestamp_type_0 = isoparse(data) return timestamp_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, Unset, datetime.datetime], data) + return cast(datetime.datetime | None | Unset, data) timestamp = _parse_timestamp(d.pop("timestamp", UNSET)) diff --git a/robosystems_client/models/exchange_token_request.py b/robosystems_client/models/exchange_token_request.py index beb2e5e..d2401f7 100644 --- a/robosystems_client/models/exchange_token_request.py +++ b/robosystems_client/models/exchange_token_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from 
collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -22,12 +24,12 @@ class ExchangeTokenRequest: Attributes: connection_id (str): Connection ID to update public_token (str): Temporary token from embedded auth - metadata (Union['ExchangeTokenRequestMetadataType0', None, Unset]): Provider-specific metadata + metadata (ExchangeTokenRequestMetadataType0 | None | Unset): Provider-specific metadata """ connection_id: str public_token: str - metadata: Union["ExchangeTokenRequestMetadataType0", None, Unset] = UNSET + metadata: ExchangeTokenRequestMetadataType0 | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -39,7 +41,7 @@ def to_dict(self) -> dict[str, Any]: public_token = self.public_token - metadata: Union[None, Unset, dict[str, Any]] + metadata: dict[str, Any] | None | Unset if isinstance(self.metadata, Unset): metadata = UNSET elif isinstance(self.metadata, ExchangeTokenRequestMetadataType0): @@ -73,7 +75,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: def _parse_metadata( data: object, - ) -> Union["ExchangeTokenRequestMetadataType0", None, Unset]: + ) -> ExchangeTokenRequestMetadataType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -84,9 +86,9 @@ def _parse_metadata( metadata_type_0 = ExchangeTokenRequestMetadataType0.from_dict(data) return metadata_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["ExchangeTokenRequestMetadataType0", None, Unset], data) + return cast(ExchangeTokenRequestMetadataType0 | None | Unset, data) metadata = _parse_metadata(d.pop("metadata", UNSET)) diff --git a/robosystems_client/models/exchange_token_request_metadata_type_0.py 
b/robosystems_client/models/exchange_token_request_metadata_type_0.py index 70f5375..ce656cd 100644 --- a/robosystems_client/models/exchange_token_request_metadata_type_0.py +++ b/robosystems_client/models/exchange_token_request_metadata_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/execute_cypher_query_response_200.py b/robosystems_client/models/execute_cypher_query_response_200.py index 045068b..a3b1008 100644 --- a/robosystems_client/models/execute_cypher_query_response_200.py +++ b/robosystems_client/models/execute_cypher_query_response_200.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -19,35 +21,35 @@ class ExecuteCypherQueryResponse200: """ Attributes: - success (Union[Unset, bool]): - data (Union[Unset, list['ExecuteCypherQueryResponse200DataItem']]): - columns (Union[Unset, list[str]]): - row_count (Union[Unset, int]): - execution_time_ms (Union[Unset, float]): - graph_id (Union[Unset, str]): - timestamp (Union[Unset, str]): + success (bool | Unset): + data (list[ExecuteCypherQueryResponse200DataItem] | Unset): + columns (list[str] | Unset): + row_count (int | Unset): + execution_time_ms (float | Unset): + graph_id (str | Unset): + timestamp (str | Unset): """ - success: Union[Unset, bool] = UNSET - data: Union[Unset, list["ExecuteCypherQueryResponse200DataItem"]] = UNSET - columns: Union[Unset, list[str]] = UNSET - row_count: Union[Unset, int] = UNSET - execution_time_ms: Union[Unset, float] = UNSET - graph_id: Union[Unset, str] = UNSET - timestamp: Union[Unset, str] = UNSET + success: bool | Unset = UNSET + data: list[ExecuteCypherQueryResponse200DataItem] | Unset = UNSET + columns: 
list[str] | Unset = UNSET + row_count: int | Unset = UNSET + execution_time_ms: float | Unset = UNSET + graph_id: str | Unset = UNSET + timestamp: str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: success = self.success - data: Union[Unset, list[dict[str, Any]]] = UNSET + data: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.data, Unset): data = [] for data_item_data in self.data: data_item = data_item_data.to_dict() data.append(data_item) - columns: Union[Unset, list[str]] = UNSET + columns: list[str] | Unset = UNSET if not isinstance(self.columns, Unset): columns = self.columns @@ -88,12 +90,14 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) success = d.pop("success", UNSET) - data = [] _data = d.pop("data", UNSET) - for data_item_data in _data or []: - data_item = ExecuteCypherQueryResponse200DataItem.from_dict(data_item_data) + data: list[ExecuteCypherQueryResponse200DataItem] | Unset = UNSET + if _data is not UNSET: + data = [] + for data_item_data in _data: + data_item = ExecuteCypherQueryResponse200DataItem.from_dict(data_item_data) - data.append(data_item) + data.append(data_item) columns = cast(list[str], d.pop("columns", UNSET)) diff --git a/robosystems_client/models/execute_cypher_query_response_200_data_item.py b/robosystems_client/models/execute_cypher_query_response_200_data_item.py index 6510c66..51dbb34 100644 --- a/robosystems_client/models/execute_cypher_query_response_200_data_item.py +++ b/robosystems_client/models/execute_cypher_query_response_200_data_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/fact_detail.py b/robosystems_client/models/fact_detail.py new file mode 100644 index 0000000..013f0d9 --- /dev/null +++ b/robosystems_client/models/fact_detail.py @@ -0,0 +1,109 @@ +from __future__ 
import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="FactDetail") + + +@_attrs_define +class FactDetail: + """ + Attributes: + fact_id (str): + element_uri (str): + element_name (str): + numeric_value (float): + unit (str): + period_start (str): + period_end (str): + """ + + fact_id: str + element_uri: str + element_name: str + numeric_value: float + unit: str + period_start: str + period_end: str + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + fact_id = self.fact_id + + element_uri = self.element_uri + + element_name = self.element_name + + numeric_value = self.numeric_value + + unit = self.unit + + period_start = self.period_start + + period_end = self.period_end + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "fact_id": fact_id, + "element_uri": element_uri, + "element_name": element_name, + "numeric_value": numeric_value, + "unit": unit, + "period_start": period_start, + "period_end": period_end, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + fact_id = d.pop("fact_id") + + element_uri = d.pop("element_uri") + + element_name = d.pop("element_name") + + numeric_value = d.pop("numeric_value") + + unit = d.pop("unit") + + period_start = d.pop("period_start") + + period_end = d.pop("period_end") + + fact_detail = cls( + fact_id=fact_id, + element_uri=element_uri, + element_name=element_name, + numeric_value=numeric_value, + unit=unit, + period_start=period_start, + period_end=period_end, + ) + + fact_detail.additional_properties = d + return fact_detail + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> 
Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/file_info.py b/robosystems_client/models/file_info.py index 693b696..5bd839f 100644 --- a/robosystems_client/models/file_info.py +++ b/robosystems_client/models/file_info.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -20,9 +22,9 @@ class FileInfo: upload_status (str): Current upload status upload_method (str): Upload method used s3_key (str): S3 object key - row_count (Union[None, Unset, int]): Estimated row count - created_at (Union[None, Unset, str]): File creation timestamp - uploaded_at (Union[None, Unset, str]): File upload completion timestamp + row_count (int | None | Unset): Estimated row count + created_at (None | str | Unset): File creation timestamp + uploaded_at (None | str | Unset): File upload completion timestamp """ file_id: str @@ -32,9 +34,9 @@ class FileInfo: upload_status: str upload_method: str s3_key: str - row_count: Union[None, Unset, int] = UNSET - created_at: Union[None, Unset, str] = UNSET - uploaded_at: Union[None, Unset, str] = UNSET + row_count: int | None | Unset = UNSET + created_at: None | str | Unset = UNSET + uploaded_at: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -52,19 +54,19 @@ def to_dict(self) -> dict[str, Any]: s3_key = self.s3_key - row_count: Union[None, Unset, int] + row_count: int | None | Unset if isinstance(self.row_count, Unset): row_count = UNSET else: 
row_count = self.row_count - created_at: Union[None, Unset, str] + created_at: None | str | Unset if isinstance(self.created_at, Unset): created_at = UNSET else: created_at = self.created_at - uploaded_at: Union[None, Unset, str] + uploaded_at: None | str | Unset if isinstance(self.uploaded_at, Unset): uploaded_at = UNSET else: @@ -109,30 +111,30 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: s3_key = d.pop("s3_key") - def _parse_row_count(data: object) -> Union[None, Unset, int]: + def _parse_row_count(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) row_count = _parse_row_count(d.pop("row_count", UNSET)) - def _parse_created_at(data: object) -> Union[None, Unset, str]: + def _parse_created_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) created_at = _parse_created_at(d.pop("created_at", UNSET)) - def _parse_uploaded_at(data: object) -> Union[None, Unset, str]: + def _parse_uploaded_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) uploaded_at = _parse_uploaded_at(d.pop("uploaded_at", UNSET)) diff --git a/robosystems_client/models/file_layer_status.py b/robosystems_client/models/file_layer_status.py new file mode 100644 index 0000000..206bb98 --- /dev/null +++ b/robosystems_client/models/file_layer_status.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="FileLayerStatus") + + +@_attrs_define 
+class FileLayerStatus: + """ + Attributes: + status (str): Layer status + timestamp (None | str | Unset): Status timestamp + row_count (int | None | Unset): Row count (if available) + size_bytes (int | None | Unset): Size in bytes (S3 layer only) + """ + + status: str + timestamp: None | str | Unset = UNSET + row_count: int | None | Unset = UNSET + size_bytes: int | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + status = self.status + + timestamp: None | str | Unset + if isinstance(self.timestamp, Unset): + timestamp = UNSET + else: + timestamp = self.timestamp + + row_count: int | None | Unset + if isinstance(self.row_count, Unset): + row_count = UNSET + else: + row_count = self.row_count + + size_bytes: int | None | Unset + if isinstance(self.size_bytes, Unset): + size_bytes = UNSET + else: + size_bytes = self.size_bytes + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "status": status, + } + ) + if timestamp is not UNSET: + field_dict["timestamp"] = timestamp + if row_count is not UNSET: + field_dict["row_count"] = row_count + if size_bytes is not UNSET: + field_dict["size_bytes"] = size_bytes + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + status = d.pop("status") + + def _parse_timestamp(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + timestamp = _parse_timestamp(d.pop("timestamp", UNSET)) + + def _parse_row_count(data: object) -> int | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(int | None | Unset, data) + + row_count = _parse_row_count(d.pop("row_count", UNSET)) + + def _parse_size_bytes(data: object) -> int | None | Unset: + if data is None: + return 
data + if isinstance(data, Unset): + return data + return cast(int | None | Unset, data) + + size_bytes = _parse_size_bytes(d.pop("size_bytes", UNSET)) + + file_layer_status = cls( + status=status, + timestamp=timestamp, + row_count=row_count, + size_bytes=size_bytes, + ) + + file_layer_status.additional_properties = d + return file_layer_status + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/file_status_update.py b/robosystems_client/models/file_status_update.py index af80594..6927ee2 100644 --- a/robosystems_client/models/file_status_update.py +++ b/robosystems_client/models/file_status_update.py @@ -1,8 +1,12 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar from attrs import define as _attrs_define +from ..types import UNSET, Unset + T = TypeVar("T", bound="FileStatusUpdate") @@ -12,13 +16,18 @@ class FileStatusUpdate: Attributes: status (str): File status: 'uploaded' (ready for ingest), 'disabled' (exclude from ingest), 'archived' (soft deleted) + ingest_to_graph (bool | Unset): Auto-ingest to graph after DuckDB staging. Default=false (batch mode). Set to + true for real-time incremental updates. Default: False. 
""" status: str + ingest_to_graph: bool | Unset = False def to_dict(self) -> dict[str, Any]: status = self.status + ingest_to_graph = self.ingest_to_graph + field_dict: dict[str, Any] = {} field_dict.update( @@ -26,6 +35,8 @@ def to_dict(self) -> dict[str, Any]: "status": status, } ) + if ingest_to_graph is not UNSET: + field_dict["ingest_to_graph"] = ingest_to_graph return field_dict @@ -34,8 +45,11 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) status = d.pop("status") + ingest_to_graph = d.pop("ingest_to_graph", UNSET) + file_status_update = cls( status=status, + ingest_to_graph=ingest_to_graph, ) return file_status_update diff --git a/robosystems_client/models/file_upload_request.py b/robosystems_client/models/file_upload_request.py index 05d9531..afe4737 100644 --- a/robosystems_client/models/file_upload_request.py +++ b/robosystems_client/models/file_upload_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union +from typing import Any, TypeVar, cast from attrs import define as _attrs_define @@ -13,17 +15,25 @@ class FileUploadRequest: """ Attributes: file_name (str): File name to upload - content_type (Union[Unset, str]): File MIME type Default: 'application/x-parquet'. + content_type (str | Unset): File MIME type Default: 'application/x-parquet'. 
+ table_name (None | str | Unset): Table name to associate file with (required for first-class /files endpoint) """ file_name: str - content_type: Union[Unset, str] = "application/x-parquet" + content_type: str | Unset = "application/x-parquet" + table_name: None | str | Unset = UNSET def to_dict(self) -> dict[str, Any]: file_name = self.file_name content_type = self.content_type + table_name: None | str | Unset + if isinstance(self.table_name, Unset): + table_name = UNSET + else: + table_name = self.table_name + field_dict: dict[str, Any] = {} field_dict.update( @@ -33,6 +43,8 @@ def to_dict(self) -> dict[str, Any]: ) if content_type is not UNSET: field_dict["content_type"] = content_type + if table_name is not UNSET: + field_dict["table_name"] = table_name return field_dict @@ -43,9 +55,19 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: content_type = d.pop("content_type", UNSET) + def _parse_table_name(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + table_name = _parse_table_name(d.pop("table_name", UNSET)) + file_upload_request = cls( file_name=file_name, content_type=content_type, + table_name=table_name, ) return file_upload_request diff --git a/robosystems_client/models/file_upload_response.py b/robosystems_client/models/file_upload_response.py index a649274..0d11c44 100644 --- a/robosystems_client/models/file_upload_response.py +++ b/robosystems_client/models/file_upload_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/forgot_password_request.py b/robosystems_client/models/forgot_password_request.py index c5460ca..2faced7 100644 --- a/robosystems_client/models/forgot_password_request.py +++ b/robosystems_client/models/forgot_password_request.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from 
collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/forgot_password_response_forgotpassword.py b/robosystems_client/models/forgot_password_response_forgotpassword.py index 215f943..5a2a50b 100644 --- a/robosystems_client/models/forgot_password_response_forgotpassword.py +++ b/robosystems_client/models/forgot_password_response_forgotpassword.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/get_current_auth_user_response_getcurrentauthuser.py b/robosystems_client/models/get_current_auth_user_response_getcurrentauthuser.py index dd82c8a..7ff2a9b 100644 --- a/robosystems_client/models/get_current_auth_user_response_getcurrentauthuser.py +++ b/robosystems_client/models/get_current_auth_user_response_getcurrentauthuser.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/get_file_info_response.py b/robosystems_client/models/get_file_info_response.py index 7a577f0..b625af7 100644 --- a/robosystems_client/models/get_file_info_response.py +++ b/robosystems_client/models/get_file_info_response.py @@ -1,11 +1,17 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field from ..types import UNSET, Unset +if TYPE_CHECKING: + from ..models.enhanced_file_status_layers import EnhancedFileStatusLayers + + T = TypeVar("T", bound="GetFileInfoResponse") @@ -22,10 +28,12 @@ class GetFileInfoResponse: upload_status (str): Current upload status upload_method (str): Upload method used s3_key (str): S3 object key - table_name (Union[None, Unset, str]): Table name - row_count (Union[None, Unset, int]): Estimated row count - 
created_at (Union[None, Unset, str]): File creation timestamp - uploaded_at (Union[None, Unset, str]): File upload completion timestamp + table_name (None | str | Unset): Table name + row_count (int | None | Unset): Estimated row count + created_at (None | str | Unset): File creation timestamp + uploaded_at (None | str | Unset): File upload completion timestamp + layers (EnhancedFileStatusLayers | None | Unset): Multi-layer pipeline status (S3 → DuckDB → Graph). Shows + status, timestamps, and row counts for each layer independently. """ file_id: str @@ -37,13 +45,16 @@ class GetFileInfoResponse: upload_status: str upload_method: str s3_key: str - table_name: Union[None, Unset, str] = UNSET - row_count: Union[None, Unset, int] = UNSET - created_at: Union[None, Unset, str] = UNSET - uploaded_at: Union[None, Unset, str] = UNSET + table_name: None | str | Unset = UNSET + row_count: int | None | Unset = UNSET + created_at: None | str | Unset = UNSET + uploaded_at: None | str | Unset = UNSET + layers: EnhancedFileStatusLayers | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: + from ..models.enhanced_file_status_layers import EnhancedFileStatusLayers + file_id = self.file_id graph_id = self.graph_id @@ -62,30 +73,38 @@ def to_dict(self) -> dict[str, Any]: s3_key = self.s3_key - table_name: Union[None, Unset, str] + table_name: None | str | Unset if isinstance(self.table_name, Unset): table_name = UNSET else: table_name = self.table_name - row_count: Union[None, Unset, int] + row_count: int | None | Unset if isinstance(self.row_count, Unset): row_count = UNSET else: row_count = self.row_count - created_at: Union[None, Unset, str] + created_at: None | str | Unset if isinstance(self.created_at, Unset): created_at = UNSET else: created_at = self.created_at - uploaded_at: Union[None, Unset, str] + uploaded_at: None | str | Unset if isinstance(self.uploaded_at, Unset): uploaded_at = UNSET 
else: uploaded_at = self.uploaded_at + layers: dict[str, Any] | None | Unset + if isinstance(self.layers, Unset): + layers = UNSET + elif isinstance(self.layers, EnhancedFileStatusLayers): + layers = self.layers.to_dict() + else: + layers = self.layers + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -109,11 +128,15 @@ def to_dict(self) -> dict[str, Any]: field_dict["created_at"] = created_at if uploaded_at is not UNSET: field_dict["uploaded_at"] = uploaded_at + if layers is not UNSET: + field_dict["layers"] = layers return field_dict @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.enhanced_file_status_layers import EnhancedFileStatusLayers + d = dict(src_dict) file_id = d.pop("file_id") @@ -133,42 +156,59 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: s3_key = d.pop("s3_key") - def _parse_table_name(data: object) -> Union[None, Unset, str]: + def _parse_table_name(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) table_name = _parse_table_name(d.pop("table_name", UNSET)) - def _parse_row_count(data: object) -> Union[None, Unset, int]: + def _parse_row_count(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) row_count = _parse_row_count(d.pop("row_count", UNSET)) - def _parse_created_at(data: object) -> Union[None, Unset, str]: + def _parse_created_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) created_at = _parse_created_at(d.pop("created_at", UNSET)) - def _parse_uploaded_at(data: object) -> Union[None, Unset, str]: + def 
_parse_uploaded_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) uploaded_at = _parse_uploaded_at(d.pop("uploaded_at", UNSET)) + def _parse_layers(data: object) -> EnhancedFileStatusLayers | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + layers_type_0 = EnhancedFileStatusLayers.from_dict(data) + + return layers_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(EnhancedFileStatusLayers | None | Unset, data) + + layers = _parse_layers(d.pop("layers", UNSET)) + get_file_info_response = cls( file_id=file_id, graph_id=graph_id, @@ -183,6 +223,7 @@ def _parse_uploaded_at(data: object) -> Union[None, Unset, str]: row_count=row_count, created_at=created_at, uploaded_at=uploaded_at, + layers=layers, ) get_file_info_response.additional_properties = d diff --git a/robosystems_client/models/get_operation_status_response_getoperationstatus.py b/robosystems_client/models/get_operation_status_response_getoperationstatus.py index 0c448fe..54b9486 100644 --- a/robosystems_client/models/get_operation_status_response_getoperationstatus.py +++ b/robosystems_client/models/get_operation_status_response_getoperationstatus.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/get_storage_usage_response_getstorageusage.py b/robosystems_client/models/get_storage_usage_response_getstorageusage.py index 358bffe..bd5c054 100644 --- a/robosystems_client/models/get_storage_usage_response_getstorageusage.py +++ b/robosystems_client/models/get_storage_usage_response_getstorageusage.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar 
diff --git a/robosystems_client/models/graph_info.py b/robosystems_client/models/graph_info.py index fbbc406..0609ecc 100644 --- a/robosystems_client/models/graph_info.py +++ b/robosystems_client/models/graph_info.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -19,8 +21,8 @@ class GraphInfo: role (str): User's role/access level is_selected (bool): Whether this is the currently selected graph created_at (str): Creation timestamp - is_repository (Union[Unset, bool]): Whether this is a shared repository (vs user graph) Default: False. - repository_type (Union[None, Unset, str]): Repository type if isRepository=true + is_repository (bool | Unset): Whether this is a shared repository (vs user graph) Default: False. + repository_type (None | str | Unset): Repository type if isRepository=true """ graph_id: str @@ -28,8 +30,8 @@ class GraphInfo: role: str is_selected: bool created_at: str - is_repository: Union[Unset, bool] = False - repository_type: Union[None, Unset, str] = UNSET + is_repository: bool | Unset = False + repository_type: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -45,7 +47,7 @@ def to_dict(self) -> dict[str, Any]: is_repository = self.is_repository - repository_type: Union[None, Unset, str] + repository_type: None | str | Unset if isinstance(self.repository_type, Unset): repository_type = UNSET else: @@ -84,12 +86,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: is_repository = d.pop("isRepository", UNSET) - def _parse_repository_type(data: object) -> Union[None, Unset, str]: + def _parse_repository_type(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return 
cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) repository_type = _parse_repository_type(d.pop("repositoryType", UNSET)) diff --git a/robosystems_client/models/graph_limits_response.py b/robosystems_client/models/graph_limits_response.py index 626c6a8..e80dc17 100644 --- a/robosystems_client/models/graph_limits_response.py +++ b/robosystems_client/models/graph_limits_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -32,19 +34,19 @@ class GraphLimitsResponse: copy_operations (CopyOperationLimits): Copy/ingestion operation limits. backups (BackupLimits): Backup operation limits. rate_limits (RateLimits): API rate limits. - credits_ (Union['CreditLimits', None, Unset]): AI credit limits (if applicable) + credits_ (CreditLimits | None | Unset): AI credit limits (if applicable) """ graph_id: str subscription_tier: str graph_tier: str is_shared_repository: bool - storage: "StorageLimits" - queries: "QueryLimits" - copy_operations: "CopyOperationLimits" - backups: "BackupLimits" - rate_limits: "RateLimits" - credits_: Union["CreditLimits", None, Unset] = UNSET + storage: StorageLimits + queries: QueryLimits + copy_operations: CopyOperationLimits + backups: BackupLimits + rate_limits: RateLimits + credits_: CreditLimits | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -68,7 +70,7 @@ def to_dict(self) -> dict[str, Any]: rate_limits = self.rate_limits.to_dict() - credits_: Union[None, Unset, dict[str, Any]] + credits_: dict[str, Any] | None | Unset if isinstance(self.credits_, Unset): credits_ = UNSET elif isinstance(self.credits_, CreditLimits): @@ -124,7 +126,7 @@ def from_dict(cls: type[T], src_dict: 
Mapping[str, Any]) -> T: rate_limits = RateLimits.from_dict(d.pop("rate_limits")) - def _parse_credits_(data: object) -> Union["CreditLimits", None, Unset]: + def _parse_credits_(data: object) -> CreditLimits | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -135,9 +137,9 @@ def _parse_credits_(data: object) -> Union["CreditLimits", None, Unset]: credits_type_0 = CreditLimits.from_dict(data) return credits_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["CreditLimits", None, Unset], data) + return cast(CreditLimits | None | Unset, data) credits_ = _parse_credits_(d.pop("credits", UNSET)) diff --git a/robosystems_client/models/graph_metadata.py b/robosystems_client/models/graph_metadata.py index 9da85d1..7bf4a39 100644 --- a/robosystems_client/models/graph_metadata.py +++ b/robosystems_client/models/graph_metadata.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -15,31 +17,31 @@ class GraphMetadata: Attributes: graph_name (str): Display name for the graph - description (Union[None, Unset, str]): Optional description - schema_extensions (Union[Unset, list[str]]): Schema extensions to enable - tags (Union[Unset, list[str]]): Tags for organizing graphs + description (None | str | Unset): Optional description + schema_extensions (list[str] | Unset): Schema extensions to enable + tags (list[str] | Unset): Tags for organizing graphs """ graph_name: str - description: Union[None, Unset, str] = UNSET - schema_extensions: Union[Unset, list[str]] = UNSET - tags: Union[Unset, list[str]] = UNSET + description: None | str | Unset = UNSET + schema_extensions: list[str] | Unset = UNSET + tags: list[str] | Unset = UNSET additional_properties: dict[str, Any] = 
_attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: graph_name = self.graph_name - description: Union[None, Unset, str] + description: None | str | Unset if isinstance(self.description, Unset): description = UNSET else: description = self.description - schema_extensions: Union[Unset, list[str]] = UNSET + schema_extensions: list[str] | Unset = UNSET if not isinstance(self.schema_extensions, Unset): schema_extensions = self.schema_extensions - tags: Union[Unset, list[str]] = UNSET + tags: list[str] | Unset = UNSET if not isinstance(self.tags, Unset): tags = self.tags @@ -64,12 +66,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) graph_name = d.pop("graph_name") - def _parse_description(data: object) -> Union[None, Unset, str]: + def _parse_description(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) description = _parse_description(d.pop("description", UNSET)) diff --git a/robosystems_client/models/graph_metrics_response.py b/robosystems_client/models/graph_metrics_response.py index af17b51..6962189 100644 --- a/robosystems_client/models/graph_metrics_response.py +++ b/robosystems_client/models/graph_metrics_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -35,20 +37,20 @@ class GraphMetricsResponse: relationship_counts (GraphMetricsResponseRelationshipCounts): Relationship counts by type estimated_size (GraphMetricsResponseEstimatedSize): Database size estimates health_status (GraphMetricsResponseHealthStatus): Database health information - graph_name (Union[None, Unset, str]): Display name for the graph - user_role 
(Union[None, Unset, str]): User's role in this graph + graph_name (None | str | Unset): Display name for the graph + user_role (None | str | Unset): User's role in this graph """ graph_id: str timestamp: str total_nodes: int total_relationships: int - node_counts: "GraphMetricsResponseNodeCounts" - relationship_counts: "GraphMetricsResponseRelationshipCounts" - estimated_size: "GraphMetricsResponseEstimatedSize" - health_status: "GraphMetricsResponseHealthStatus" - graph_name: Union[None, Unset, str] = UNSET - user_role: Union[None, Unset, str] = UNSET + node_counts: GraphMetricsResponseNodeCounts + relationship_counts: GraphMetricsResponseRelationshipCounts + estimated_size: GraphMetricsResponseEstimatedSize + health_status: GraphMetricsResponseHealthStatus + graph_name: None | str | Unset = UNSET + user_role: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -68,13 +70,13 @@ def to_dict(self) -> dict[str, Any]: health_status = self.health_status.to_dict() - graph_name: Union[None, Unset, str] + graph_name: None | str | Unset if isinstance(self.graph_name, Unset): graph_name = UNSET else: graph_name = self.graph_name - user_role: Union[None, Unset, str] + user_role: None | str | Unset if isinstance(self.user_role, Unset): user_role = UNSET else: @@ -137,21 +139,21 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: health_status = GraphMetricsResponseHealthStatus.from_dict(d.pop("health_status")) - def _parse_graph_name(data: object) -> Union[None, Unset, str]: + def _parse_graph_name(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) graph_name = _parse_graph_name(d.pop("graph_name", UNSET)) - def _parse_user_role(data: object) -> Union[None, Unset, str]: + def _parse_user_role(data: object) -> None | str | Unset: if 
data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) user_role = _parse_user_role(d.pop("user_role", UNSET)) diff --git a/robosystems_client/models/graph_metrics_response_estimated_size.py b/robosystems_client/models/graph_metrics_response_estimated_size.py index 987ae1e..89b7ae8 100644 --- a/robosystems_client/models/graph_metrics_response_estimated_size.py +++ b/robosystems_client/models/graph_metrics_response_estimated_size.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/graph_metrics_response_health_status.py b/robosystems_client/models/graph_metrics_response_health_status.py index e1bb09b..6a6f8e5 100644 --- a/robosystems_client/models/graph_metrics_response_health_status.py +++ b/robosystems_client/models/graph_metrics_response_health_status.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/graph_metrics_response_node_counts.py b/robosystems_client/models/graph_metrics_response_node_counts.py index 9d8444d..89ee68b 100644 --- a/robosystems_client/models/graph_metrics_response_node_counts.py +++ b/robosystems_client/models/graph_metrics_response_node_counts.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/graph_metrics_response_relationship_counts.py b/robosystems_client/models/graph_metrics_response_relationship_counts.py index f88a2aa..3f09054 100644 --- a/robosystems_client/models/graph_metrics_response_relationship_counts.py +++ b/robosystems_client/models/graph_metrics_response_relationship_counts.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar 
diff --git a/robosystems_client/models/graph_subscription_response.py b/robosystems_client/models/graph_subscription_response.py index 146c2f2..05831e3 100644 --- a/robosystems_client/models/graph_subscription_response.py +++ b/robosystems_client/models/graph_subscription_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -22,11 +24,11 @@ class GraphSubscriptionResponse: status (str): Subscription status base_price_cents (int): Base price in cents created_at (str): Creation timestamp - current_period_start (Union[None, Unset, str]): Current billing period start - current_period_end (Union[None, Unset, str]): Current billing period end - started_at (Union[None, Unset, str]): Subscription start date - canceled_at (Union[None, Unset, str]): Cancellation date - ends_at (Union[None, Unset, str]): Subscription end date (when access will be revoked, especially relevant for + current_period_start (None | str | Unset): Current billing period start + current_period_end (None | str | Unset): Current billing period end + started_at (None | str | Unset): Subscription start date + canceled_at (None | str | Unset): Cancellation date + ends_at (None | str | Unset): Subscription end date (when access will be revoked, especially relevant for cancelled subscriptions) """ @@ -38,11 +40,11 @@ class GraphSubscriptionResponse: status: str base_price_cents: int created_at: str - current_period_start: Union[None, Unset, str] = UNSET - current_period_end: Union[None, Unset, str] = UNSET - started_at: Union[None, Unset, str] = UNSET - canceled_at: Union[None, Unset, str] = UNSET - ends_at: Union[None, Unset, str] = UNSET + current_period_start: None | str | Unset = UNSET + current_period_end: None | str | Unset = UNSET + started_at: None | str | Unset = UNSET + canceled_at: 
None | str | Unset = UNSET + ends_at: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -62,31 +64,31 @@ def to_dict(self) -> dict[str, Any]: created_at = self.created_at - current_period_start: Union[None, Unset, str] + current_period_start: None | str | Unset if isinstance(self.current_period_start, Unset): current_period_start = UNSET else: current_period_start = self.current_period_start - current_period_end: Union[None, Unset, str] + current_period_end: None | str | Unset if isinstance(self.current_period_end, Unset): current_period_end = UNSET else: current_period_end = self.current_period_end - started_at: Union[None, Unset, str] + started_at: None | str | Unset if isinstance(self.started_at, Unset): started_at = UNSET else: started_at = self.started_at - canceled_at: Union[None, Unset, str] + canceled_at: None | str | Unset if isinstance(self.canceled_at, Unset): canceled_at = UNSET else: canceled_at = self.canceled_at - ends_at: Union[None, Unset, str] + ends_at: None | str | Unset if isinstance(self.ends_at, Unset): ends_at = UNSET else: @@ -138,50 +140,50 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: created_at = d.pop("created_at") - def _parse_current_period_start(data: object) -> Union[None, Unset, str]: + def _parse_current_period_start(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) current_period_start = _parse_current_period_start( d.pop("current_period_start", UNSET) ) - def _parse_current_period_end(data: object) -> Union[None, Unset, str]: + def _parse_current_period_end(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) current_period_end = 
_parse_current_period_end(d.pop("current_period_end", UNSET)) - def _parse_started_at(data: object) -> Union[None, Unset, str]: + def _parse_started_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) started_at = _parse_started_at(d.pop("started_at", UNSET)) - def _parse_canceled_at(data: object) -> Union[None, Unset, str]: + def _parse_canceled_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) canceled_at = _parse_canceled_at(d.pop("canceled_at", UNSET)) - def _parse_ends_at(data: object) -> Union[None, Unset, str]: + def _parse_ends_at(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) ends_at = _parse_ends_at(d.pop("ends_at", UNSET)) diff --git a/robosystems_client/models/graph_subscription_tier.py b/robosystems_client/models/graph_subscription_tier.py index f99bc6d..e56ec1b 100644 --- a/robosystems_client/models/graph_subscription_tier.py +++ b/robosystems_client/models/graph_subscription_tier.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -30,9 +32,9 @@ class GraphSubscriptionTier: priority_support (bool): Whether priority support is included api_rate_multiplier (float): API rate multiplier backend (str): Database backend (kuzu or neo4j) - max_queries_per_hour (Union[None, Unset, int]): Maximum queries per hour - max_subgraphs (Union[Unset, int]): Maximum subgraphs supported Default: 0. 
- instance_type (Union[None, Unset, str]): Instance type + max_queries_per_hour (int | None | Unset): Maximum queries per hour + max_subgraphs (int | Unset): Maximum subgraphs supported Default: 0. + instance_type (None | str | Unset): Instance type """ name: str @@ -48,9 +50,9 @@ class GraphSubscriptionTier: priority_support: bool api_rate_multiplier: float backend: str - max_queries_per_hour: Union[None, Unset, int] = UNSET - max_subgraphs: Union[Unset, int] = 0 - instance_type: Union[None, Unset, str] = UNSET + max_queries_per_hour: int | None | Unset = UNSET + max_subgraphs: int | Unset = 0 + instance_type: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -80,7 +82,7 @@ def to_dict(self) -> dict[str, Any]: backend = self.backend - max_queries_per_hour: Union[None, Unset, int] + max_queries_per_hour: int | None | Unset if isinstance(self.max_queries_per_hour, Unset): max_queries_per_hour = UNSET else: @@ -88,7 +90,7 @@ def to_dict(self) -> dict[str, Any]: max_subgraphs = self.max_subgraphs - instance_type: Union[None, Unset, str] + instance_type: None | str | Unset if isinstance(self.instance_type, Unset): instance_type = UNSET else: @@ -151,12 +153,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: backend = d.pop("backend") - def _parse_max_queries_per_hour(data: object) -> Union[None, Unset, int]: + def _parse_max_queries_per_hour(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) max_queries_per_hour = _parse_max_queries_per_hour( d.pop("max_queries_per_hour", UNSET) @@ -164,12 +166,12 @@ def _parse_max_queries_per_hour(data: object) -> Union[None, Unset, int]: max_subgraphs = d.pop("max_subgraphs", UNSET) - def _parse_instance_type(data: object) -> Union[None, Unset, str]: + def _parse_instance_type(data: 
object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) instance_type = _parse_instance_type(d.pop("instance_type", UNSET)) diff --git a/robosystems_client/models/graph_subscriptions.py b/robosystems_client/models/graph_subscriptions.py index 47f00af..8b8c79d 100644 --- a/robosystems_client/models/graph_subscriptions.py +++ b/robosystems_client/models/graph_subscriptions.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar, cast @@ -23,15 +25,15 @@ class GraphSubscriptions: Attributes: description (str): Description of graph subscriptions pricing_model (str): Pricing model type (per_graph or per_organization) - tiers (list['GraphSubscriptionTier']): Available infrastructure tiers + tiers (list[GraphSubscriptionTier]): Available infrastructure tiers storage (StorageInfo): Storage pricing information. 
notes (list[str]): Important notes """ description: str pricing_model: str - tiers: list["GraphSubscriptionTier"] - storage: "StorageInfo" + tiers: list[GraphSubscriptionTier] + storage: StorageInfo notes: list[str] additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/graph_tier_backup.py b/robosystems_client/models/graph_tier_backup.py index 3a0af57..a8235d4 100644 --- a/robosystems_client/models/graph_tier_backup.py +++ b/robosystems_client/models/graph_tier_backup.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/graph_tier_copy_operations.py b/robosystems_client/models/graph_tier_copy_operations.py index 66760ec..2682407 100644 --- a/robosystems_client/models/graph_tier_copy_operations.py +++ b/robosystems_client/models/graph_tier_copy_operations.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/graph_tier_info.py b/robosystems_client/models/graph_tier_info.py index a3433d1..c2d7c23 100644 --- a/robosystems_client/models/graph_tier_info.py +++ b/robosystems_client/models/graph_tier_info.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -25,14 +27,14 @@ class GraphTierInfo: description (str): Tier description backend (str): Database backend (kuzu or neo4j) enabled (bool): Whether tier is available - max_subgraphs (Union[None, int]): Maximum subgraphs allowed + max_subgraphs (int | None): Maximum subgraphs allowed storage_limit_gb (int): Storage limit in GB monthly_credits (int): Monthly AI credits api_rate_multiplier (float): API rate 
limit multiplier features (list[str]): List of tier features instance (GraphTierInstance): Instance specifications for a tier. limits (GraphTierLimits): Resource limits for a tier. - monthly_price (Union[None, Unset, float]): Monthly price in USD + monthly_price (float | None | Unset): Monthly price in USD """ tier: str @@ -41,14 +43,14 @@ class GraphTierInfo: description: str backend: str enabled: bool - max_subgraphs: Union[None, int] + max_subgraphs: int | None storage_limit_gb: int monthly_credits: int api_rate_multiplier: float features: list[str] - instance: "GraphTierInstance" - limits: "GraphTierLimits" - monthly_price: Union[None, Unset, float] = UNSET + instance: GraphTierInstance + limits: GraphTierLimits + monthly_price: float | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -64,7 +66,7 @@ def to_dict(self) -> dict[str, Any]: enabled = self.enabled - max_subgraphs: Union[None, int] + max_subgraphs: int | None max_subgraphs = self.max_subgraphs storage_limit_gb = self.storage_limit_gb @@ -79,7 +81,7 @@ def to_dict(self) -> dict[str, Any]: limits = self.limits.to_dict() - monthly_price: Union[None, Unset, float] + monthly_price: float | None | Unset if isinstance(self.monthly_price, Unset): monthly_price = UNSET else: @@ -127,10 +129,10 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: enabled = d.pop("enabled") - def _parse_max_subgraphs(data: object) -> Union[None, int]: + def _parse_max_subgraphs(data: object) -> int | None: if data is None: return data - return cast(Union[None, int], data) + return cast(int | None, data) max_subgraphs = _parse_max_subgraphs(d.pop("max_subgraphs")) @@ -146,12 +148,12 @@ def _parse_max_subgraphs(data: object) -> Union[None, int]: limits = GraphTierLimits.from_dict(d.pop("limits")) - def _parse_monthly_price(data: object) -> Union[None, Unset, float]: + def _parse_monthly_price(data: object) -> float | None | 
Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) monthly_price = _parse_monthly_price(d.pop("monthly_price", UNSET)) diff --git a/robosystems_client/models/graph_tier_instance.py b/robosystems_client/models/graph_tier_instance.py index 60452a0..6bce083 100644 --- a/robosystems_client/models/graph_tier_instance.py +++ b/robosystems_client/models/graph_tier_instance.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/graph_tier_limits.py b/robosystems_client/models/graph_tier_limits.py index ac6c56a..168b85d 100644 --- a/robosystems_client/models/graph_tier_limits.py +++ b/robosystems_client/models/graph_tier_limits.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -19,16 +21,16 @@ class GraphTierLimits: Attributes: storage_gb (int): Storage limit in GB monthly_credits (int): Monthly credit allocation - max_subgraphs (Union[None, int]): Maximum subgraphs (null for unlimited) + max_subgraphs (int | None): Maximum subgraphs (null for unlimited) copy_operations (GraphTierCopyOperations): Copy operation limits for a tier. backup (GraphTierBackup): Backup configuration for a tier. 
""" storage_gb: int monthly_credits: int - max_subgraphs: Union[None, int] - copy_operations: "GraphTierCopyOperations" - backup: "GraphTierBackup" + max_subgraphs: int | None + copy_operations: GraphTierCopyOperations + backup: GraphTierBackup additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -36,7 +38,7 @@ def to_dict(self) -> dict[str, Any]: monthly_credits = self.monthly_credits - max_subgraphs: Union[None, int] + max_subgraphs: int | None max_subgraphs = self.max_subgraphs copy_operations = self.copy_operations.to_dict() @@ -67,10 +69,10 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: monthly_credits = d.pop("monthly_credits") - def _parse_max_subgraphs(data: object) -> Union[None, int]: + def _parse_max_subgraphs(data: object) -> int | None: if data is None: return data - return cast(Union[None, int], data) + return cast(int | None, data) max_subgraphs = _parse_max_subgraphs(d.pop("max_subgraphs")) diff --git a/robosystems_client/models/graph_usage_response.py b/robosystems_client/models/graph_usage_response.py index 7ee8a8e..6a6beb1 100644 --- a/robosystems_client/models/graph_usage_response.py +++ b/robosystems_client/models/graph_usage_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -26,19 +28,19 @@ class GraphUsageResponse: graph_id (str): Graph database identifier time_range (str): Time range for usage data timestamp (str): Usage collection timestamp - storage_summary (Union['StorageSummary', None, Unset]): Storage usage summary - credit_summary (Union['CreditSummary', None, Unset]): Credit consumption summary - performance_insights (Union['PerformanceInsights', None, Unset]): Performance analytics - recent_events 
(Union[Unset, list['GraphUsageResponseRecentEventsItem']]): Recent usage events + storage_summary (None | StorageSummary | Unset): Storage usage summary + credit_summary (CreditSummary | None | Unset): Credit consumption summary + performance_insights (None | PerformanceInsights | Unset): Performance analytics + recent_events (list[GraphUsageResponseRecentEventsItem] | Unset): Recent usage events """ graph_id: str time_range: str timestamp: str - storage_summary: Union["StorageSummary", None, Unset] = UNSET - credit_summary: Union["CreditSummary", None, Unset] = UNSET - performance_insights: Union["PerformanceInsights", None, Unset] = UNSET - recent_events: Union[Unset, list["GraphUsageResponseRecentEventsItem"]] = UNSET + storage_summary: None | StorageSummary | Unset = UNSET + credit_summary: CreditSummary | None | Unset = UNSET + performance_insights: None | PerformanceInsights | Unset = UNSET + recent_events: list[GraphUsageResponseRecentEventsItem] | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -52,7 +54,7 @@ def to_dict(self) -> dict[str, Any]: timestamp = self.timestamp - storage_summary: Union[None, Unset, dict[str, Any]] + storage_summary: dict[str, Any] | None | Unset if isinstance(self.storage_summary, Unset): storage_summary = UNSET elif isinstance(self.storage_summary, StorageSummary): @@ -60,7 +62,7 @@ def to_dict(self) -> dict[str, Any]: else: storage_summary = self.storage_summary - credit_summary: Union[None, Unset, dict[str, Any]] + credit_summary: dict[str, Any] | None | Unset if isinstance(self.credit_summary, Unset): credit_summary = UNSET elif isinstance(self.credit_summary, CreditSummary): @@ -68,7 +70,7 @@ def to_dict(self) -> dict[str, Any]: else: credit_summary = self.credit_summary - performance_insights: Union[None, Unset, dict[str, Any]] + performance_insights: dict[str, Any] | None | Unset if isinstance(self.performance_insights, Unset): 
performance_insights = UNSET elif isinstance(self.performance_insights, PerformanceInsights): @@ -76,7 +78,7 @@ def to_dict(self) -> dict[str, Any]: else: performance_insights = self.performance_insights - recent_events: Union[Unset, list[dict[str, Any]]] = UNSET + recent_events: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.recent_events, Unset): recent_events = [] for recent_events_item_data in self.recent_events: @@ -119,7 +121,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: timestamp = d.pop("timestamp") - def _parse_storage_summary(data: object) -> Union["StorageSummary", None, Unset]: + def _parse_storage_summary(data: object) -> None | StorageSummary | Unset: if data is None: return data if isinstance(data, Unset): @@ -130,13 +132,13 @@ def _parse_storage_summary(data: object) -> Union["StorageSummary", None, Unset] storage_summary_type_0 = StorageSummary.from_dict(data) return storage_summary_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["StorageSummary", None, Unset], data) + return cast(None | StorageSummary | Unset, data) storage_summary = _parse_storage_summary(d.pop("storage_summary", UNSET)) - def _parse_credit_summary(data: object) -> Union["CreditSummary", None, Unset]: + def _parse_credit_summary(data: object) -> CreditSummary | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -147,15 +149,13 @@ def _parse_credit_summary(data: object) -> Union["CreditSummary", None, Unset]: credit_summary_type_0 = CreditSummary.from_dict(data) return credit_summary_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["CreditSummary", None, Unset], data) + return cast(CreditSummary | None | Unset, data) credit_summary = _parse_credit_summary(d.pop("credit_summary", UNSET)) - def _parse_performance_insights( - data: object, - ) -> Union["PerformanceInsights", None, Unset]: + def 
_parse_performance_insights(data: object) -> None | PerformanceInsights | Unset: if data is None: return data if isinstance(data, Unset): @@ -166,22 +166,24 @@ def _parse_performance_insights( performance_insights_type_0 = PerformanceInsights.from_dict(data) return performance_insights_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["PerformanceInsights", None, Unset], data) + return cast(None | PerformanceInsights | Unset, data) performance_insights = _parse_performance_insights( d.pop("performance_insights", UNSET) ) - recent_events = [] _recent_events = d.pop("recent_events", UNSET) - for recent_events_item_data in _recent_events or []: - recent_events_item = GraphUsageResponseRecentEventsItem.from_dict( - recent_events_item_data - ) + recent_events: list[GraphUsageResponseRecentEventsItem] | Unset = UNSET + if _recent_events is not UNSET: + recent_events = [] + for recent_events_item_data in _recent_events: + recent_events_item = GraphUsageResponseRecentEventsItem.from_dict( + recent_events_item_data + ) - recent_events.append(recent_events_item) + recent_events.append(recent_events_item) graph_usage_response = cls( graph_id=graph_id, diff --git a/robosystems_client/models/graph_usage_response_recent_events_item.py b/robosystems_client/models/graph_usage_response_recent_events_item.py index 122307f..d5f0b89 100644 --- a/robosystems_client/models/graph_usage_response_recent_events_item.py +++ b/robosystems_client/models/graph_usage_response_recent_events_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/health_status.py b/robosystems_client/models/health_status.py index 371a7f7..4bceaf9 100644 --- a/robosystems_client/models/health_status.py +++ b/robosystems_client/models/health_status.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import datetime from collections.abc 
import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -22,12 +24,12 @@ class HealthStatus: Attributes: status (str): Current health status timestamp (datetime.datetime): Time of health check - details (Union['HealthStatusDetailsType0', None, Unset]): Additional health check details + details (HealthStatusDetailsType0 | None | Unset): Additional health check details """ status: str timestamp: datetime.datetime - details: Union["HealthStatusDetailsType0", None, Unset] = UNSET + details: HealthStatusDetailsType0 | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -37,7 +39,7 @@ def to_dict(self) -> dict[str, Any]: timestamp = self.timestamp.isoformat() - details: Union[None, Unset, dict[str, Any]] + details: dict[str, Any] | None | Unset if isinstance(self.details, Unset): details = UNSET elif isinstance(self.details, HealthStatusDetailsType0): @@ -67,7 +69,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: timestamp = isoparse(d.pop("timestamp")) - def _parse_details(data: object) -> Union["HealthStatusDetailsType0", None, Unset]: + def _parse_details(data: object) -> HealthStatusDetailsType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -78,9 +80,9 @@ def _parse_details(data: object) -> Union["HealthStatusDetailsType0", None, Unse details_type_0 = HealthStatusDetailsType0.from_dict(data) return details_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["HealthStatusDetailsType0", None, Unset], data) + return cast(HealthStatusDetailsType0 | None | Unset, data) details = _parse_details(d.pop("details", UNSET)) diff --git a/robosystems_client/models/health_status_details_type_0.py 
b/robosystems_client/models/health_status_details_type_0.py index 1017b66..13cd935 100644 --- a/robosystems_client/models/health_status_details_type_0.py +++ b/robosystems_client/models/health_status_details_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/http_validation_error.py b/robosystems_client/models/http_validation_error.py index e44398f..8d872a1 100644 --- a/robosystems_client/models/http_validation_error.py +++ b/robosystems_client/models/http_validation_error.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -17,14 +19,14 @@ class HTTPValidationError: """ Attributes: - detail (Union[Unset, list['ValidationError']]): + detail (list[ValidationError] | Unset): """ - detail: Union[Unset, list["ValidationError"]] = UNSET + detail: list[ValidationError] | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: - detail: Union[Unset, list[dict[str, Any]]] = UNSET + detail: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.detail, Unset): detail = [] for detail_item_data in self.detail: @@ -44,12 +46,14 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.validation_error import ValidationError d = dict(src_dict) - detail = [] _detail = d.pop("detail", UNSET) - for detail_item_data in _detail or []: - detail_item = ValidationError.from_dict(detail_item_data) + detail: list[ValidationError] | Unset = UNSET + if _detail is not UNSET: + detail = [] + for detail_item_data in _detail: + detail_item = ValidationError.from_dict(detail_item_data) - detail.append(detail_item) + detail.append(detail_item) 
http_validation_error = cls( detail=detail, diff --git a/robosystems_client/models/initial_entity_data.py b/robosystems_client/models/initial_entity_data.py index 571d11d..c128cda 100644 --- a/robosystems_client/models/initial_entity_data.py +++ b/robosystems_client/models/initial_entity_data.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -19,24 +21,24 @@ class InitialEntityData: Attributes: name (str): Entity name uri (str): Entity website or URI - cik (Union[None, Unset, str]): CIK number for SEC filings - sic (Union[None, Unset, str]): SIC code - sic_description (Union[None, Unset, str]): SIC description - category (Union[None, Unset, str]): Business category - state_of_incorporation (Union[None, Unset, str]): State of incorporation - fiscal_year_end (Union[None, Unset, str]): Fiscal year end (MMDD) - ein (Union[None, Unset, str]): Employer Identification Number + cik (None | str | Unset): CIK number for SEC filings + sic (None | str | Unset): SIC code + sic_description (None | str | Unset): SIC description + category (None | str | Unset): Business category + state_of_incorporation (None | str | Unset): State of incorporation + fiscal_year_end (None | str | Unset): Fiscal year end (MMDD) + ein (None | str | Unset): Employer Identification Number """ name: str uri: str - cik: Union[None, Unset, str] = UNSET - sic: Union[None, Unset, str] = UNSET - sic_description: Union[None, Unset, str] = UNSET - category: Union[None, Unset, str] = UNSET - state_of_incorporation: Union[None, Unset, str] = UNSET - fiscal_year_end: Union[None, Unset, str] = UNSET - ein: Union[None, Unset, str] = UNSET + cik: None | str | Unset = UNSET + sic: None | str | Unset = UNSET + sic_description: None | str | Unset = UNSET + category: None | str | Unset = UNSET + 
state_of_incorporation: None | str | Unset = UNSET + fiscal_year_end: None | str | Unset = UNSET + ein: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -44,43 +46,43 @@ def to_dict(self) -> dict[str, Any]: uri = self.uri - cik: Union[None, Unset, str] + cik: None | str | Unset if isinstance(self.cik, Unset): cik = UNSET else: cik = self.cik - sic: Union[None, Unset, str] + sic: None | str | Unset if isinstance(self.sic, Unset): sic = UNSET else: sic = self.sic - sic_description: Union[None, Unset, str] + sic_description: None | str | Unset if isinstance(self.sic_description, Unset): sic_description = UNSET else: sic_description = self.sic_description - category: Union[None, Unset, str] + category: None | str | Unset if isinstance(self.category, Unset): category = UNSET else: category = self.category - state_of_incorporation: Union[None, Unset, str] + state_of_incorporation: None | str | Unset if isinstance(self.state_of_incorporation, Unset): state_of_incorporation = UNSET else: state_of_incorporation = self.state_of_incorporation - fiscal_year_end: Union[None, Unset, str] + fiscal_year_end: None | str | Unset if isinstance(self.fiscal_year_end, Unset): fiscal_year_end = UNSET else: fiscal_year_end = self.fiscal_year_end - ein: Union[None, Unset, str] + ein: None | str | Unset if isinstance(self.ein, Unset): ein = UNSET else: @@ -118,68 +120,68 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: uri = d.pop("uri") - def _parse_cik(data: object) -> Union[None, Unset, str]: + def _parse_cik(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) cik = _parse_cik(d.pop("cik", UNSET)) - def _parse_sic(data: object) -> Union[None, Unset, str]: + def _parse_sic(data: object) -> None | str | Unset: if data is None: return data if 
isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) sic = _parse_sic(d.pop("sic", UNSET)) - def _parse_sic_description(data: object) -> Union[None, Unset, str]: + def _parse_sic_description(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) sic_description = _parse_sic_description(d.pop("sic_description", UNSET)) - def _parse_category(data: object) -> Union[None, Unset, str]: + def _parse_category(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) category = _parse_category(d.pop("category", UNSET)) - def _parse_state_of_incorporation(data: object) -> Union[None, Unset, str]: + def _parse_state_of_incorporation(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) state_of_incorporation = _parse_state_of_incorporation( d.pop("state_of_incorporation", UNSET) ) - def _parse_fiscal_year_end(data: object) -> Union[None, Unset, str]: + def _parse_fiscal_year_end(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) fiscal_year_end = _parse_fiscal_year_end(d.pop("fiscal_year_end", UNSET)) - def _parse_ein(data: object) -> Union[None, Unset, str]: + def _parse_ein(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) ein = _parse_ein(d.pop("ein", UNSET)) diff --git a/robosystems_client/models/invite_member_request.py 
b/robosystems_client/models/invite_member_request.py index 02e4518..9a0446c 100644 --- a/robosystems_client/models/invite_member_request.py +++ b/robosystems_client/models/invite_member_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,17 +18,17 @@ class InviteMemberRequest: Attributes: email (str): - role (Union[None, OrgRole, Unset]): Default: OrgRole.MEMBER. + role (None | OrgRole | Unset): Default: OrgRole.MEMBER. """ email: str - role: Union[None, OrgRole, Unset] = OrgRole.MEMBER + role: None | OrgRole | Unset = OrgRole.MEMBER additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: email = self.email - role: Union[None, Unset, str] + role: None | str | Unset if isinstance(self.role, Unset): role = UNSET elif isinstance(self.role, OrgRole): @@ -51,7 +53,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) email = d.pop("email") - def _parse_role(data: object) -> Union[None, OrgRole, Unset]: + def _parse_role(data: object) -> None | OrgRole | Unset: if data is None: return data if isinstance(data, Unset): @@ -62,9 +64,9 @@ def _parse_role(data: object) -> Union[None, OrgRole, Unset]: role_type_0 = OrgRole(data) return role_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, OrgRole, Unset], data) + return cast(None | OrgRole | Unset, data) role = _parse_role(d.pop("role", UNSET)) diff --git a/robosystems_client/models/invoice.py b/robosystems_client/models/invoice.py index 01eb2d7..9a4b40c 100644 --- a/robosystems_client/models/invoice.py +++ b/robosystems_client/models/invoice.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing 
import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -24,13 +26,13 @@ class Invoice: amount_paid (int): Amount paid in cents currency (str): Currency code (usd) created (str): Invoice creation date (ISO format) - line_items (list['InvoiceLineItem']): Invoice line items - number (Union[None, Unset, str]): Invoice number - due_date (Union[None, Unset, str]): Invoice due date (ISO format) - paid_at (Union[None, Unset, str]): Payment date (ISO format) - invoice_pdf (Union[None, Unset, str]): PDF download URL - hosted_invoice_url (Union[None, Unset, str]): Hosted invoice URL - subscription_id (Union[None, Unset, str]): Associated subscription ID + line_items (list[InvoiceLineItem]): Invoice line items + number (None | str | Unset): Invoice number + due_date (None | str | Unset): Invoice due date (ISO format) + paid_at (None | str | Unset): Payment date (ISO format) + invoice_pdf (None | str | Unset): PDF download URL + hosted_invoice_url (None | str | Unset): Hosted invoice URL + subscription_id (None | str | Unset): Associated subscription ID """ id: str @@ -39,13 +41,13 @@ class Invoice: amount_paid: int currency: str created: str - line_items: list["InvoiceLineItem"] - number: Union[None, Unset, str] = UNSET - due_date: Union[None, Unset, str] = UNSET - paid_at: Union[None, Unset, str] = UNSET - invoice_pdf: Union[None, Unset, str] = UNSET - hosted_invoice_url: Union[None, Unset, str] = UNSET - subscription_id: Union[None, Unset, str] = UNSET + line_items: list[InvoiceLineItem] + number: None | str | Unset = UNSET + due_date: None | str | Unset = UNSET + paid_at: None | str | Unset = UNSET + invoice_pdf: None | str | Unset = UNSET + hosted_invoice_url: None | str | Unset = UNSET + subscription_id: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, 
Any]: @@ -66,37 +68,37 @@ def to_dict(self) -> dict[str, Any]: line_items_item = line_items_item_data.to_dict() line_items.append(line_items_item) - number: Union[None, Unset, str] + number: None | str | Unset if isinstance(self.number, Unset): number = UNSET else: number = self.number - due_date: Union[None, Unset, str] + due_date: None | str | Unset if isinstance(self.due_date, Unset): due_date = UNSET else: due_date = self.due_date - paid_at: Union[None, Unset, str] + paid_at: None | str | Unset if isinstance(self.paid_at, Unset): paid_at = UNSET else: paid_at = self.paid_at - invoice_pdf: Union[None, Unset, str] + invoice_pdf: None | str | Unset if isinstance(self.invoice_pdf, Unset): invoice_pdf = UNSET else: invoice_pdf = self.invoice_pdf - hosted_invoice_url: Union[None, Unset, str] + hosted_invoice_url: None | str | Unset if isinstance(self.hosted_invoice_url, Unset): hosted_invoice_url = UNSET else: hosted_invoice_url = self.hosted_invoice_url - subscription_id: Union[None, Unset, str] + subscription_id: None | str | Unset if isinstance(self.subscription_id, Unset): subscription_id = UNSET else: @@ -154,57 +156,57 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: line_items.append(line_items_item) - def _parse_number(data: object) -> Union[None, Unset, str]: + def _parse_number(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) number = _parse_number(d.pop("number", UNSET)) - def _parse_due_date(data: object) -> Union[None, Unset, str]: + def _parse_due_date(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) due_date = _parse_due_date(d.pop("due_date", UNSET)) - def _parse_paid_at(data: object) -> Union[None, Unset, str]: + def _parse_paid_at(data: object) -> None | 
str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) paid_at = _parse_paid_at(d.pop("paid_at", UNSET)) - def _parse_invoice_pdf(data: object) -> Union[None, Unset, str]: + def _parse_invoice_pdf(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) invoice_pdf = _parse_invoice_pdf(d.pop("invoice_pdf", UNSET)) - def _parse_hosted_invoice_url(data: object) -> Union[None, Unset, str]: + def _parse_hosted_invoice_url(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) hosted_invoice_url = _parse_hosted_invoice_url(d.pop("hosted_invoice_url", UNSET)) - def _parse_subscription_id(data: object) -> Union[None, Unset, str]: + def _parse_subscription_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) subscription_id = _parse_subscription_id(d.pop("subscription_id", UNSET)) diff --git a/robosystems_client/models/invoice_line_item.py b/robosystems_client/models/invoice_line_item.py index aad6cc2..c9c9c90 100644 --- a/robosystems_client/models/invoice_line_item.py +++ b/robosystems_client/models/invoice_line_item.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -17,15 +19,15 @@ class InvoiceLineItem: description (str): Line item description amount (int): Amount in cents quantity (int): Quantity - period_start (Union[None, Unset, str]): Billing 
period start - period_end (Union[None, Unset, str]): Billing period end + period_start (None | str | Unset): Billing period start + period_end (None | str | Unset): Billing period end """ description: str amount: int quantity: int - period_start: Union[None, Unset, str] = UNSET - period_end: Union[None, Unset, str] = UNSET + period_start: None | str | Unset = UNSET + period_end: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -35,13 +37,13 @@ def to_dict(self) -> dict[str, Any]: quantity = self.quantity - period_start: Union[None, Unset, str] + period_start: None | str | Unset if isinstance(self.period_start, Unset): period_start = UNSET else: period_start = self.period_start - period_end: Union[None, Unset, str] + period_end: None | str | Unset if isinstance(self.period_end, Unset): period_end = UNSET else: @@ -72,21 +74,21 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: quantity = d.pop("quantity") - def _parse_period_start(data: object) -> Union[None, Unset, str]: + def _parse_period_start(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) period_start = _parse_period_start(d.pop("period_start", UNSET)) - def _parse_period_end(data: object) -> Union[None, Unset, str]: + def _parse_period_end(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) period_end = _parse_period_end(d.pop("period_end", UNSET)) diff --git a/robosystems_client/models/invoices_response.py b/robosystems_client/models/invoices_response.py index 86fd7b3..9e1761f 100644 --- a/robosystems_client/models/invoices_response.py +++ b/robosystems_client/models/invoices_response.py @@ -1,3 +1,5 @@ +from __future__ 
import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -16,12 +18,12 @@ class InvoicesResponse: """Response for invoice list. Attributes: - invoices (list['Invoice']): List of invoices + invoices (list[Invoice]): List of invoices total_count (int): Total number of invoices has_more (bool): Whether more invoices are available """ - invoices: list["Invoice"] + invoices: list[Invoice] total_count: int has_more: bool additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/link_token_request.py b/robosystems_client/models/link_token_request.py index a6df4f2..7d28e0e 100644 --- a/robosystems_client/models/link_token_request.py +++ b/robosystems_client/models/link_token_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -21,16 +23,16 @@ class LinkTokenRequest: Attributes: entity_id (str): Entity identifier user_id (str): User identifier - provider (Union[LinkTokenRequestProviderType0, None, Unset]): Provider type (defaults based on connection) - products (Union[None, Unset, list[str]]): Data products to request (provider-specific) - options (Union['LinkTokenRequestOptionsType0', None, Unset]): Provider-specific options + provider (LinkTokenRequestProviderType0 | None | Unset): Provider type (defaults based on connection) + products (list[str] | None | Unset): Data products to request (provider-specific) + options (LinkTokenRequestOptionsType0 | None | Unset): Provider-specific options """ entity_id: str user_id: str - provider: Union[LinkTokenRequestProviderType0, None, Unset] = UNSET - products: Union[None, Unset, list[str]] = UNSET - options: Union["LinkTokenRequestOptionsType0", None, Unset] = UNSET + provider: 
LinkTokenRequestProviderType0 | None | Unset = UNSET + products: list[str] | None | Unset = UNSET + options: LinkTokenRequestOptionsType0 | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -40,7 +42,7 @@ def to_dict(self) -> dict[str, Any]: user_id = self.user_id - provider: Union[None, Unset, str] + provider: None | str | Unset if isinstance(self.provider, Unset): provider = UNSET elif isinstance(self.provider, LinkTokenRequestProviderType0): @@ -48,7 +50,7 @@ def to_dict(self) -> dict[str, Any]: else: provider = self.provider - products: Union[None, Unset, list[str]] + products: list[str] | None | Unset if isinstance(self.products, Unset): products = UNSET elif isinstance(self.products, list): @@ -57,7 +59,7 @@ def to_dict(self) -> dict[str, Any]: else: products = self.products - options: Union[None, Unset, dict[str, Any]] + options: dict[str, Any] | None | Unset if isinstance(self.options, Unset): options = UNSET elif isinstance(self.options, LinkTokenRequestOptionsType0): @@ -91,9 +93,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: user_id = d.pop("user_id") - def _parse_provider( - data: object, - ) -> Union[LinkTokenRequestProviderType0, None, Unset]: + def _parse_provider(data: object) -> LinkTokenRequestProviderType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -104,13 +104,13 @@ def _parse_provider( provider_type_0 = LinkTokenRequestProviderType0(data) return provider_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[LinkTokenRequestProviderType0, None, Unset], data) + return cast(LinkTokenRequestProviderType0 | None | Unset, data) provider = _parse_provider(d.pop("provider", UNSET)) - def _parse_products(data: object) -> Union[None, Unset, list[str]]: + def _parse_products(data: object) -> list[str] | None | Unset: if data is None: return data if 
isinstance(data, Unset): @@ -121,15 +121,13 @@ def _parse_products(data: object) -> Union[None, Unset, list[str]]: products_type_0 = cast(list[str], data) return products_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, Unset, list[str]], data) + return cast(list[str] | None | Unset, data) products = _parse_products(d.pop("products", UNSET)) - def _parse_options( - data: object, - ) -> Union["LinkTokenRequestOptionsType0", None, Unset]: + def _parse_options(data: object) -> LinkTokenRequestOptionsType0 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -140,9 +138,9 @@ def _parse_options( options_type_0 = LinkTokenRequestOptionsType0.from_dict(data) return options_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["LinkTokenRequestOptionsType0", None, Unset], data) + return cast(LinkTokenRequestOptionsType0 | None | Unset, data) options = _parse_options(d.pop("options", UNSET)) diff --git a/robosystems_client/models/link_token_request_options_type_0.py b/robosystems_client/models/link_token_request_options_type_0.py index 057de18..a419405 100644 --- a/robosystems_client/models/link_token_request_options_type_0.py +++ b/robosystems_client/models/link_token_request_options_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/list_org_graphs_response_200_item.py b/robosystems_client/models/list_org_graphs_response_200_item.py index 32e95dd..4793e26 100644 --- a/robosystems_client/models/list_org_graphs_response_200_item.py +++ b/robosystems_client/models/list_org_graphs_response_200_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/list_subgraphs_response.py 
b/robosystems_client/models/list_subgraphs_response.py index 01ac754..a5a11dd 100644 --- a/robosystems_client/models/list_subgraphs_response.py +++ b/robosystems_client/models/list_subgraphs_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -24,9 +26,9 @@ class ListSubgraphsResponse: subgraphs_enabled (bool): Whether subgraphs are enabled for this tier (requires Kuzu Large/XLarge or Neo4j Enterprise XLarge) subgraph_count (int): Total number of subgraphs - subgraphs (list['SubgraphSummary']): List of subgraphs - max_subgraphs (Union[None, Unset, int]): Maximum allowed subgraphs for this tier (None = unlimited) - total_size_mb (Union[None, Unset, float]): Combined size of all subgraphs in megabytes + subgraphs (list[SubgraphSummary]): List of subgraphs + max_subgraphs (int | None | Unset): Maximum allowed subgraphs for this tier (None = unlimited) + total_size_mb (float | None | Unset): Combined size of all subgraphs in megabytes """ parent_graph_id: str @@ -34,9 +36,9 @@ class ListSubgraphsResponse: parent_graph_tier: str subgraphs_enabled: bool subgraph_count: int - subgraphs: list["SubgraphSummary"] - max_subgraphs: Union[None, Unset, int] = UNSET - total_size_mb: Union[None, Unset, float] = UNSET + subgraphs: list[SubgraphSummary] + max_subgraphs: int | None | Unset = UNSET + total_size_mb: float | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -55,13 +57,13 @@ def to_dict(self) -> dict[str, Any]: subgraphs_item = subgraphs_item_data.to_dict() subgraphs.append(subgraphs_item) - max_subgraphs: Union[None, Unset, int] + max_subgraphs: int | None | Unset if isinstance(self.max_subgraphs, Unset): max_subgraphs = UNSET else: 
max_subgraphs = self.max_subgraphs - total_size_mb: Union[None, Unset, float] + total_size_mb: float | None | Unset if isinstance(self.total_size_mb, Unset): total_size_mb = UNSET else: @@ -108,21 +110,21 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: subgraphs.append(subgraphs_item) - def _parse_max_subgraphs(data: object) -> Union[None, Unset, int]: + def _parse_max_subgraphs(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) max_subgraphs = _parse_max_subgraphs(d.pop("max_subgraphs", UNSET)) - def _parse_total_size_mb(data: object) -> Union[None, Unset, float]: + def _parse_total_size_mb(data: object) -> float | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) total_size_mb = _parse_total_size_mb(d.pop("total_size_mb", UNSET)) diff --git a/robosystems_client/models/list_table_files_response.py b/robosystems_client/models/list_table_files_response.py index b53e776..fd733b4 100644 --- a/robosystems_client/models/list_table_files_response.py +++ b/robosystems_client/models/list_table_files_response.py @@ -1,9 +1,13 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field +from ..types import UNSET, Unset + if TYPE_CHECKING: from ..models.file_info import FileInfo @@ -16,24 +20,22 @@ class ListTableFilesResponse: """ Attributes: graph_id (str): Graph database identifier - table_name (str): Table name - files (list['FileInfo']): List of files in the table + files (list[FileInfo]): List of files in the table total_files (int): Total number of files total_size_bytes (int): Total size of all 
files in bytes + table_name (None | str | Unset): Table name (null if listing all files in graph) """ graph_id: str - table_name: str - files: list["FileInfo"] + files: list[FileInfo] total_files: int total_size_bytes: int + table_name: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: graph_id = self.graph_id - table_name = self.table_name - files = [] for files_item_data in self.files: files_item = files_item_data.to_dict() @@ -43,17 +45,24 @@ def to_dict(self) -> dict[str, Any]: total_size_bytes = self.total_size_bytes + table_name: None | str | Unset + if isinstance(self.table_name, Unset): + table_name = UNSET + else: + table_name = self.table_name + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { "graph_id": graph_id, - "table_name": table_name, "files": files, "total_files": total_files, "total_size_bytes": total_size_bytes, } ) + if table_name is not UNSET: + field_dict["table_name"] = table_name return field_dict @@ -64,8 +73,6 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) graph_id = d.pop("graph_id") - table_name = d.pop("table_name") - files = [] _files = d.pop("files") for files_item_data in _files: @@ -77,12 +84,21 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: total_size_bytes = d.pop("total_size_bytes") + def _parse_table_name(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + table_name = _parse_table_name(d.pop("table_name", UNSET)) + list_table_files_response = cls( graph_id=graph_id, - table_name=table_name, files=files, total_files=total_files, total_size_bytes=total_size_bytes, + table_name=table_name, ) list_table_files_response.additional_properties = d diff --git a/robosystems_client/models/login_request.py 
b/robosystems_client/models/login_request.py index 22eb618..0e5ac0b 100644 --- a/robosystems_client/models/login_request.py +++ b/robosystems_client/models/login_request.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/logout_user_response_logoutuser.py b/robosystems_client/models/logout_user_response_logoutuser.py index 03318ca..fc1d784 100644 --- a/robosystems_client/models/logout_user_response_logoutuser.py +++ b/robosystems_client/models/logout_user_response_logoutuser.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/bulk_ingest_request.py b/robosystems_client/models/materialize_request.py similarity index 51% rename from robosystems_client/models/bulk_ingest_request.py rename to robosystems_client/models/materialize_request.py index f8d7185..014067d 100644 --- a/robosystems_client/models/bulk_ingest_request.py +++ b/robosystems_client/models/materialize_request.py @@ -1,50 +1,60 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union +from typing import Any, TypeVar from attrs import define as _attrs_define from ..types import UNSET, Unset -T = TypeVar("T", bound="BulkIngestRequest") +T = TypeVar("T", bound="MaterializeRequest") @_attrs_define -class BulkIngestRequest: +class MaterializeRequest: """ Attributes: - ignore_errors (Union[Unset, bool]): Continue ingestion on row errors Default: True. - rebuild (Union[Unset, bool]): Rebuild graph database from scratch before ingestion. Safe operation - staged data - is the source of truth, graph can always be regenerated. Default: False. + force (bool | Unset): Force materialization even if graph is not stale Default: False. + rebuild (bool | Unset): Delete and recreate graph database before materialization Default: False. 
+ ignore_errors (bool | Unset): Continue ingestion on row errors Default: True. """ - ignore_errors: Union[Unset, bool] = True - rebuild: Union[Unset, bool] = False + force: bool | Unset = False + rebuild: bool | Unset = False + ignore_errors: bool | Unset = True def to_dict(self) -> dict[str, Any]: - ignore_errors = self.ignore_errors + force = self.force rebuild = self.rebuild + ignore_errors = self.ignore_errors + field_dict: dict[str, Any] = {} field_dict.update({}) - if ignore_errors is not UNSET: - field_dict["ignore_errors"] = ignore_errors + if force is not UNSET: + field_dict["force"] = force if rebuild is not UNSET: field_dict["rebuild"] = rebuild + if ignore_errors is not UNSET: + field_dict["ignore_errors"] = ignore_errors return field_dict @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - ignore_errors = d.pop("ignore_errors", UNSET) + force = d.pop("force", UNSET) rebuild = d.pop("rebuild", UNSET) - bulk_ingest_request = cls( - ignore_errors=ignore_errors, + ignore_errors = d.pop("ignore_errors", UNSET) + + materialize_request = cls( + force=force, rebuild=rebuild, + ignore_errors=ignore_errors, ) - return bulk_ingest_request + return materialize_request diff --git a/robosystems_client/models/materialize_response.py b/robosystems_client/models/materialize_response.py new file mode 100644 index 0000000..ef83c58 --- /dev/null +++ b/robosystems_client/models/materialize_response.py @@ -0,0 +1,131 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="MaterializeResponse") + + +@_attrs_define +class MaterializeResponse: + """ + Attributes: + status (str): Materialization status + graph_id (str): Graph database identifier + was_stale (bool): Whether graph was stale before materialization + 
tables_materialized (list[str]): List of tables successfully materialized + total_rows (int): Total rows materialized across all tables + execution_time_ms (float): Total materialization time + message (str): Human-readable status message + stale_reason (None | str | Unset): Reason graph was stale + """ + + status: str + graph_id: str + was_stale: bool + tables_materialized: list[str] + total_rows: int + execution_time_ms: float + message: str + stale_reason: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + status = self.status + + graph_id = self.graph_id + + was_stale = self.was_stale + + tables_materialized = self.tables_materialized + + total_rows = self.total_rows + + execution_time_ms = self.execution_time_ms + + message = self.message + + stale_reason: None | str | Unset + if isinstance(self.stale_reason, Unset): + stale_reason = UNSET + else: + stale_reason = self.stale_reason + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "status": status, + "graph_id": graph_id, + "was_stale": was_stale, + "tables_materialized": tables_materialized, + "total_rows": total_rows, + "execution_time_ms": execution_time_ms, + "message": message, + } + ) + if stale_reason is not UNSET: + field_dict["stale_reason"] = stale_reason + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + status = d.pop("status") + + graph_id = d.pop("graph_id") + + was_stale = d.pop("was_stale") + + tables_materialized = cast(list[str], d.pop("tables_materialized")) + + total_rows = d.pop("total_rows") + + execution_time_ms = d.pop("execution_time_ms") + + message = d.pop("message") + + def _parse_stale_reason(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + 
stale_reason = _parse_stale_reason(d.pop("stale_reason", UNSET)) + + materialize_response = cls( + status=status, + graph_id=graph_id, + was_stale=was_stale, + tables_materialized=tables_materialized, + total_rows=total_rows, + execution_time_ms=execution_time_ms, + message=message, + stale_reason=stale_reason, + ) + + materialize_response.additional_properties = d + return materialize_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/materialize_status_response.py b/robosystems_client/models/materialize_status_response.py new file mode 100644 index 0000000..072a45b --- /dev/null +++ b/robosystems_client/models/materialize_status_response.py @@ -0,0 +1,172 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="MaterializeStatusResponse") + + +@_attrs_define +class MaterializeStatusResponse: + """ + Attributes: + graph_id (str): Graph database identifier + is_stale (bool): Whether graph is currently stale + message (str): Human-readable status summary + stale_reason (None | str | Unset): Reason for staleness if applicable + stale_since (None | str | Unset): When graph became stale (ISO timestamp) + last_materialized_at (None | str | Unset): When graph was last materialized (ISO timestamp) + materialization_count (int | Unset): Total number of materializations performed Default: 0. 
+ hours_since_materialization (float | None | Unset): Hours since last materialization + """ + + graph_id: str + is_stale: bool + message: str + stale_reason: None | str | Unset = UNSET + stale_since: None | str | Unset = UNSET + last_materialized_at: None | str | Unset = UNSET + materialization_count: int | Unset = 0 + hours_since_materialization: float | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + graph_id = self.graph_id + + is_stale = self.is_stale + + message = self.message + + stale_reason: None | str | Unset + if isinstance(self.stale_reason, Unset): + stale_reason = UNSET + else: + stale_reason = self.stale_reason + + stale_since: None | str | Unset + if isinstance(self.stale_since, Unset): + stale_since = UNSET + else: + stale_since = self.stale_since + + last_materialized_at: None | str | Unset + if isinstance(self.last_materialized_at, Unset): + last_materialized_at = UNSET + else: + last_materialized_at = self.last_materialized_at + + materialization_count = self.materialization_count + + hours_since_materialization: float | None | Unset + if isinstance(self.hours_since_materialization, Unset): + hours_since_materialization = UNSET + else: + hours_since_materialization = self.hours_since_materialization + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "graph_id": graph_id, + "is_stale": is_stale, + "message": message, + } + ) + if stale_reason is not UNSET: + field_dict["stale_reason"] = stale_reason + if stale_since is not UNSET: + field_dict["stale_since"] = stale_since + if last_materialized_at is not UNSET: + field_dict["last_materialized_at"] = last_materialized_at + if materialization_count is not UNSET: + field_dict["materialization_count"] = materialization_count + if hours_since_materialization is not UNSET: + field_dict["hours_since_materialization"] = hours_since_materialization + + 
return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + graph_id = d.pop("graph_id") + + is_stale = d.pop("is_stale") + + message = d.pop("message") + + def _parse_stale_reason(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + stale_reason = _parse_stale_reason(d.pop("stale_reason", UNSET)) + + def _parse_stale_since(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + stale_since = _parse_stale_since(d.pop("stale_since", UNSET)) + + def _parse_last_materialized_at(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + last_materialized_at = _parse_last_materialized_at( + d.pop("last_materialized_at", UNSET) + ) + + materialization_count = d.pop("materialization_count", UNSET) + + def _parse_hours_since_materialization(data: object) -> float | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(float | None | Unset, data) + + hours_since_materialization = _parse_hours_since_materialization( + d.pop("hours_since_materialization", UNSET) + ) + + materialize_status_response = cls( + graph_id=graph_id, + is_stale=is_stale, + message=message, + stale_reason=stale_reason, + stale_since=stale_since, + last_materialized_at=last_materialized_at, + materialization_count=materialization_count, + hours_since_materialization=hours_since_materialization, + ) + + materialize_status_response.additional_properties = d + return materialize_status_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def 
__setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/mcp_tool_call.py b/robosystems_client/models/mcp_tool_call.py index b3c190c..dea9860 100644 --- a/robosystems_client/models/mcp_tool_call.py +++ b/robosystems_client/models/mcp_tool_call.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -19,17 +21,17 @@ class MCPToolCall: Attributes: name (str): Name of the MCP tool to execute - arguments (Union[Unset, MCPToolCallArguments]): Arguments to pass to the tool + arguments (MCPToolCallArguments | Unset): Arguments to pass to the tool """ name: str - arguments: Union[Unset, "MCPToolCallArguments"] = UNSET + arguments: MCPToolCallArguments | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: name = self.name - arguments: Union[Unset, dict[str, Any]] = UNSET + arguments: dict[str, Any] | Unset = UNSET if not isinstance(self.arguments, Unset): arguments = self.arguments.to_dict() @@ -53,7 +55,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: name = d.pop("name") _arguments = d.pop("arguments", UNSET) - arguments: Union[Unset, MCPToolCallArguments] + arguments: MCPToolCallArguments | Unset if isinstance(_arguments, Unset): arguments = UNSET else: diff --git a/robosystems_client/models/mcp_tool_call_arguments.py b/robosystems_client/models/mcp_tool_call_arguments.py index 75e9c34..77c1689 100644 --- a/robosystems_client/models/mcp_tool_call_arguments.py +++ 
b/robosystems_client/models/mcp_tool_call_arguments.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/mcp_tools_response.py b/robosystems_client/models/mcp_tools_response.py index 7b17ec1..bd00e41 100644 --- a/robosystems_client/models/mcp_tools_response.py +++ b/robosystems_client/models/mcp_tools_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -16,10 +18,10 @@ class MCPToolsResponse: """Response model for MCP tools listing. Attributes: - tools (list['MCPToolsResponseToolsItem']): List of available MCP tools with their schemas + tools (list[MCPToolsResponseToolsItem]): List of available MCP tools with their schemas """ - tools: list["MCPToolsResponseToolsItem"] + tools: list[MCPToolsResponseToolsItem] additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/mcp_tools_response_tools_item.py b/robosystems_client/models/mcp_tools_response_tools_item.py index 1ea284f..1ef2506 100644 --- a/robosystems_client/models/mcp_tools_response_tools_item.py +++ b/robosystems_client/models/mcp_tools_response_tools_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/o_auth_callback_request.py b/robosystems_client/models/o_auth_callback_request.py index ba5a506..5212d8e 100644 --- a/robosystems_client/models/o_auth_callback_request.py +++ b/robosystems_client/models/o_auth_callback_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,16 
+18,16 @@ class OAuthCallbackRequest: Attributes: code (str): Authorization code from OAuth provider state (str): OAuth state for verification - realm_id (Union[None, Unset, str]): QuickBooks-specific realm ID - error (Union[None, Unset, str]): OAuth error if authorization failed - error_description (Union[None, Unset, str]): OAuth error details + realm_id (None | str | Unset): QuickBooks-specific realm ID + error (None | str | Unset): OAuth error if authorization failed + error_description (None | str | Unset): OAuth error details """ code: str state: str - realm_id: Union[None, Unset, str] = UNSET - error: Union[None, Unset, str] = UNSET - error_description: Union[None, Unset, str] = UNSET + realm_id: None | str | Unset = UNSET + error: None | str | Unset = UNSET + error_description: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -33,19 +35,19 @@ def to_dict(self) -> dict[str, Any]: state = self.state - realm_id: Union[None, Unset, str] + realm_id: None | str | Unset if isinstance(self.realm_id, Unset): realm_id = UNSET else: realm_id = self.realm_id - error: Union[None, Unset, str] + error: None | str | Unset if isinstance(self.error, Unset): error = UNSET else: error = self.error - error_description: Union[None, Unset, str] + error_description: None | str | Unset if isinstance(self.error_description, Unset): error_description = UNSET else: @@ -75,30 +77,30 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: state = d.pop("state") - def _parse_realm_id(data: object) -> Union[None, Unset, str]: + def _parse_realm_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) realm_id = _parse_realm_id(d.pop("realm_id", UNSET)) - def _parse_error(data: object) -> Union[None, Unset, str]: + def _parse_error(data: object) -> 
None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) error = _parse_error(d.pop("error", UNSET)) - def _parse_error_description(data: object) -> Union[None, Unset, str]: + def _parse_error_description(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) error_description = _parse_error_description(d.pop("error_description", UNSET)) diff --git a/robosystems_client/models/o_auth_init_request.py b/robosystems_client/models/o_auth_init_request.py index aab0a01..9fc09b8 100644 --- a/robosystems_client/models/o_auth_init_request.py +++ b/robosystems_client/models/o_auth_init_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -21,13 +23,13 @@ class OAuthInitRequest: Attributes: connection_id (str): Connection ID to link OAuth to - redirect_uri (Union[None, Unset, str]): Override default redirect URI - additional_params (Union['OAuthInitRequestAdditionalParamsType0', None, Unset]): Provider-specific parameters + redirect_uri (None | str | Unset): Override default redirect URI + additional_params (None | OAuthInitRequestAdditionalParamsType0 | Unset): Provider-specific parameters """ connection_id: str - redirect_uri: Union[None, Unset, str] = UNSET - additional_params: Union["OAuthInitRequestAdditionalParamsType0", None, Unset] = UNSET + redirect_uri: None | str | Unset = UNSET + additional_params: None | OAuthInitRequestAdditionalParamsType0 | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: 
@@ -37,13 +39,13 @@ def to_dict(self) -> dict[str, Any]: connection_id = self.connection_id - redirect_uri: Union[None, Unset, str] + redirect_uri: None | str | Unset if isinstance(self.redirect_uri, Unset): redirect_uri = UNSET else: redirect_uri = self.redirect_uri - additional_params: Union[None, Unset, dict[str, Any]] + additional_params: dict[str, Any] | None | Unset if isinstance(self.additional_params, Unset): additional_params = UNSET elif isinstance(self.additional_params, OAuthInitRequestAdditionalParamsType0): @@ -74,18 +76,18 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) connection_id = d.pop("connection_id") - def _parse_redirect_uri(data: object) -> Union[None, Unset, str]: + def _parse_redirect_uri(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) redirect_uri = _parse_redirect_uri(d.pop("redirect_uri", UNSET)) def _parse_additional_params( data: object, - ) -> Union["OAuthInitRequestAdditionalParamsType0", None, Unset]: + ) -> None | OAuthInitRequestAdditionalParamsType0 | Unset: if data is None: return data if isinstance(data, Unset): @@ -96,9 +98,9 @@ def _parse_additional_params( additional_params_type_0 = OAuthInitRequestAdditionalParamsType0.from_dict(data) return additional_params_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["OAuthInitRequestAdditionalParamsType0", None, Unset], data) + return cast(None | OAuthInitRequestAdditionalParamsType0 | Unset, data) additional_params = _parse_additional_params(d.pop("additional_params", UNSET)) diff --git a/robosystems_client/models/o_auth_init_request_additional_params_type_0.py b/robosystems_client/models/o_auth_init_request_additional_params_type_0.py index 70e3797..6f50561 100644 --- 
a/robosystems_client/models/o_auth_init_request_additional_params_type_0.py +++ b/robosystems_client/models/o_auth_init_request_additional_params_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/o_auth_init_response.py b/robosystems_client/models/o_auth_init_response.py index 90135de..5ac85ae 100644 --- a/robosystems_client/models/o_auth_init_response.py +++ b/robosystems_client/models/o_auth_init_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/offering_repository_plan.py b/robosystems_client/models/offering_repository_plan.py index ed19f84..552dc56 100644 --- a/robosystems_client/models/offering_repository_plan.py +++ b/robosystems_client/models/offering_repository_plan.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -26,7 +28,7 @@ class OfferingRepositoryPlan: monthly_credits (int): Monthly credit allocation access_level (str): Access level features (list[str]): List of features - rate_limits (Union['OfferingRepositoryPlanRateLimitsType0', None, Unset]): Rate limits for this plan + rate_limits (None | OfferingRepositoryPlanRateLimitsType0 | Unset): Rate limits for this plan """ plan: str @@ -35,7 +37,7 @@ class OfferingRepositoryPlan: monthly_credits: int access_level: str features: list[str] - rate_limits: Union["OfferingRepositoryPlanRateLimitsType0", None, Unset] = UNSET + rate_limits: None | OfferingRepositoryPlanRateLimitsType0 | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ 
-55,7 +57,7 @@ def to_dict(self) -> dict[str, Any]: features = self.features - rate_limits: Union[None, Unset, dict[str, Any]] + rate_limits: dict[str, Any] | None | Unset if isinstance(self.rate_limits, Unset): rate_limits = UNSET elif isinstance(self.rate_limits, OfferingRepositoryPlanRateLimitsType0): @@ -101,7 +103,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: def _parse_rate_limits( data: object, - ) -> Union["OfferingRepositoryPlanRateLimitsType0", None, Unset]: + ) -> None | OfferingRepositoryPlanRateLimitsType0 | Unset: if data is None: return data if isinstance(data, Unset): @@ -112,9 +114,9 @@ def _parse_rate_limits( rate_limits_type_0 = OfferingRepositoryPlanRateLimitsType0.from_dict(data) return rate_limits_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["OfferingRepositoryPlanRateLimitsType0", None, Unset], data) + return cast(None | OfferingRepositoryPlanRateLimitsType0 | Unset, data) rate_limits = _parse_rate_limits(d.pop("rate_limits", UNSET)) diff --git a/robosystems_client/models/offering_repository_plan_rate_limits_type_0.py b/robosystems_client/models/offering_repository_plan_rate_limits_type_0.py index c3055f7..db1277b 100644 --- a/robosystems_client/models/offering_repository_plan_rate_limits_type_0.py +++ b/robosystems_client/models/offering_repository_plan_rate_limits_type_0.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -11,9 +13,7 @@ class OfferingRepositoryPlanRateLimitsType0: """ """ - additional_properties: dict[str, Union[None, int]] = _attrs_field( - init=False, factory=dict - ) + additional_properties: dict[str, int | None] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: field_dict: dict[str, Any] = {} @@ 
-30,10 +30,10 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: additional_properties = {} for prop_name, prop_dict in d.items(): - def _parse_additional_property(data: object) -> Union[None, int]: + def _parse_additional_property(data: object) -> int | None: if data is None: return data - return cast(Union[None, int], data) + return cast(int | None, data) additional_property = _parse_additional_property(prop_dict) @@ -48,10 +48,10 @@ def _parse_additional_property(data: object) -> Union[None, int]: def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> Union[None, int]: + def __getitem__(self, key: str) -> int | None: return self.additional_properties[key] - def __setitem__(self, key: str, value: Union[None, int]) -> None: + def __setitem__(self, key: str, value: int | None) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/robosystems_client/models/operation_costs.py b/robosystems_client/models/operation_costs.py index e609e52..4833363 100644 --- a/robosystems_client/models/operation_costs.py +++ b/robosystems_client/models/operation_costs.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar, cast @@ -25,8 +27,8 @@ class OperationCosts: """ description: str - ai_operations: "OperationCostsAiOperations" - token_pricing: "OperationCostsTokenPricing" + ai_operations: OperationCostsAiOperations + token_pricing: OperationCostsTokenPricing included_operations: list[str] notes: list[str] additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/operation_costs_ai_operations.py b/robosystems_client/models/operation_costs_ai_operations.py index ff305a9..c233850 100644 --- a/robosystems_client/models/operation_costs_ai_operations.py +++ 
b/robosystems_client/models/operation_costs_ai_operations.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/operation_costs_token_pricing.py b/robosystems_client/models/operation_costs_token_pricing.py index c601290..79d9fcc 100644 --- a/robosystems_client/models/operation_costs_token_pricing.py +++ b/robosystems_client/models/operation_costs_token_pricing.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -15,7 +17,7 @@ class OperationCostsTokenPricing: """Token pricing by model""" - additional_properties: dict[str, "TokenPricing"] = _attrs_field( + additional_properties: dict[str, TokenPricing] = _attrs_field( init=False, factory=dict ) @@ -46,10 +48,10 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> "TokenPricing": + def __getitem__(self, key: str) -> TokenPricing: return self.additional_properties[key] - def __setitem__(self, key: str, value: "TokenPricing") -> None: + def __setitem__(self, key: str, value: TokenPricing) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/robosystems_client/models/org_detail_response.py b/robosystems_client/models/org_detail_response.py index db24f66..9def86e 100644 --- a/robosystems_client/models/org_detail_response.py +++ b/robosystems_client/models/org_detail_response.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -27,9 +29,9 @@ class OrgDetailResponse: name (str): org_type 
(OrgType): user_role (OrgRole): - members (list['OrgDetailResponseMembersItem']): - graphs (list['OrgDetailResponseGraphsItem']): - limits (Union['OrgDetailResponseLimitsType0', None]): + members (list[OrgDetailResponseMembersItem]): + graphs (list[OrgDetailResponseGraphsItem]): + limits (None | OrgDetailResponseLimitsType0): created_at (datetime.datetime): updated_at (datetime.datetime): """ @@ -38,9 +40,9 @@ class OrgDetailResponse: name: str org_type: OrgType user_role: OrgRole - members: list["OrgDetailResponseMembersItem"] - graphs: list["OrgDetailResponseGraphsItem"] - limits: Union["OrgDetailResponseLimitsType0", None] + members: list[OrgDetailResponseMembersItem] + graphs: list[OrgDetailResponseGraphsItem] + limits: None | OrgDetailResponseLimitsType0 created_at: datetime.datetime updated_at: datetime.datetime additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) @@ -66,7 +68,7 @@ def to_dict(self) -> dict[str, Any]: graphs_item = graphs_item_data.to_dict() graphs.append(graphs_item) - limits: Union[None, dict[str, Any]] + limits: dict[str, Any] | None if isinstance(self.limits, OrgDetailResponseLimitsType0): limits = self.limits.to_dict() else: @@ -123,7 +125,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: graphs.append(graphs_item) - def _parse_limits(data: object) -> Union["OrgDetailResponseLimitsType0", None]: + def _parse_limits(data: object) -> None | OrgDetailResponseLimitsType0: if data is None: return data try: @@ -132,9 +134,9 @@ def _parse_limits(data: object) -> Union["OrgDetailResponseLimitsType0", None]: limits_type_0 = OrgDetailResponseLimitsType0.from_dict(data) return limits_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["OrgDetailResponseLimitsType0", None], data) + return cast(None | OrgDetailResponseLimitsType0, data) limits = _parse_limits(d.pop("limits")) diff --git 
a/robosystems_client/models/org_detail_response_graphs_item.py b/robosystems_client/models/org_detail_response_graphs_item.py index 12e6659..794fc63 100644 --- a/robosystems_client/models/org_detail_response_graphs_item.py +++ b/robosystems_client/models/org_detail_response_graphs_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/org_detail_response_limits_type_0.py b/robosystems_client/models/org_detail_response_limits_type_0.py index 360010f..5cc78d5 100644 --- a/robosystems_client/models/org_detail_response_limits_type_0.py +++ b/robosystems_client/models/org_detail_response_limits_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/org_detail_response_members_item.py b/robosystems_client/models/org_detail_response_members_item.py index 3125d13..82cb665 100644 --- a/robosystems_client/models/org_detail_response_members_item.py +++ b/robosystems_client/models/org_detail_response_members_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/org_limits_response.py b/robosystems_client/models/org_limits_response.py index 6459250..a4da53e 100644 --- a/robosystems_client/models/org_limits_response.py +++ b/robosystems_client/models/org_limits_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar, cast @@ -25,7 +27,7 @@ class OrgLimitsResponse: org_id: str max_graphs: int - current_usage: "OrgLimitsResponseCurrentUsage" + current_usage: OrgLimitsResponseCurrentUsage warnings: list[str] can_create_graph: bool additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git 
a/robosystems_client/models/org_limits_response_current_usage.py b/robosystems_client/models/org_limits_response_current_usage.py index 0b57a50..19483a5 100644 --- a/robosystems_client/models/org_limits_response_current_usage.py +++ b/robosystems_client/models/org_limits_response_current_usage.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/org_list_response.py b/robosystems_client/models/org_list_response.py index d8d3280..114b214 100644 --- a/robosystems_client/models/org_list_response.py +++ b/robosystems_client/models/org_list_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -16,11 +18,11 @@ class OrgListResponse: """List of organizations response. Attributes: - orgs (list['OrgResponse']): + orgs (list[OrgResponse]): total (int): """ - orgs: list["OrgResponse"] + orgs: list[OrgResponse] total: int additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/org_member_list_response.py b/robosystems_client/models/org_member_list_response.py index ebff899..1859402 100644 --- a/robosystems_client/models/org_member_list_response.py +++ b/robosystems_client/models/org_member_list_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -16,12 +18,12 @@ class OrgMemberListResponse: """List of organization members response. 
Attributes: - members (list['OrgMemberResponse']): + members (list[OrgMemberResponse]): total (int): org_id (str): """ - members: list["OrgMemberResponse"] + members: list[OrgMemberResponse] total: int org_id: str additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/org_member_response.py b/robosystems_client/models/org_member_response.py index fef39c0..5d4fc53 100644 --- a/robosystems_client/models/org_member_response.py +++ b/robosystems_client/models/org_member_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/org_response.py b/robosystems_client/models/org_response.py index 0d41e84..b1ecd3a 100644 --- a/robosystems_client/models/org_response.py +++ b/robosystems_client/models/org_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/org_usage_response.py b/robosystems_client/models/org_usage_response.py index 5256d43..365f08a 100644 --- a/robosystems_client/models/org_usage_response.py +++ b/robosystems_client/models/org_usage_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -29,17 +31,17 @@ class OrgUsageResponse: start_date (datetime.datetime): end_date (datetime.datetime): summary (OrgUsageSummary): Organization usage summary. 
- graph_details (list['OrgUsageResponseGraphDetailsItem']): - daily_trend (list['OrgUsageResponseDailyTrendItem']): + graph_details (list[OrgUsageResponseGraphDetailsItem]): + daily_trend (list[OrgUsageResponseDailyTrendItem]): """ org_id: str period_days: int start_date: datetime.datetime end_date: datetime.datetime - summary: "OrgUsageSummary" - graph_details: list["OrgUsageResponseGraphDetailsItem"] - daily_trend: list["OrgUsageResponseDailyTrendItem"] + summary: OrgUsageSummary + graph_details: list[OrgUsageResponseGraphDetailsItem] + daily_trend: list[OrgUsageResponseDailyTrendItem] additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/org_usage_response_daily_trend_item.py b/robosystems_client/models/org_usage_response_daily_trend_item.py index c69c535..2ccb473 100644 --- a/robosystems_client/models/org_usage_response_daily_trend_item.py +++ b/robosystems_client/models/org_usage_response_daily_trend_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/org_usage_response_graph_details_item.py b/robosystems_client/models/org_usage_response_graph_details_item.py index 2bddf8d..324e134 100644 --- a/robosystems_client/models/org_usage_response_graph_details_item.py +++ b/robosystems_client/models/org_usage_response_graph_details_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/org_usage_summary.py b/robosystems_client/models/org_usage_summary.py index 0fc4471..3fb789d 100644 --- a/robosystems_client/models/org_usage_summary.py +++ b/robosystems_client/models/org_usage_summary.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import 
Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -20,9 +22,9 @@ class OrgUsageSummary: daily_avg_api_calls (float): projected_monthly_credits (float): projected_monthly_api_calls (int): - credits_limit (Union[None, int]): - api_calls_limit (Union[None, int]): - storage_limit_gb (Union[None, int]): + credits_limit (int | None): + api_calls_limit (int | None): + storage_limit_gb (int | None): """ total_credits_used: float @@ -33,9 +35,9 @@ class OrgUsageSummary: daily_avg_api_calls: float projected_monthly_credits: float projected_monthly_api_calls: int - credits_limit: Union[None, int] - api_calls_limit: Union[None, int] - storage_limit_gb: Union[None, int] + credits_limit: int | None + api_calls_limit: int | None + storage_limit_gb: int | None additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -55,13 +57,13 @@ def to_dict(self) -> dict[str, Any]: projected_monthly_api_calls = self.projected_monthly_api_calls - credits_limit: Union[None, int] + credits_limit: int | None credits_limit = self.credits_limit - api_calls_limit: Union[None, int] + api_calls_limit: int | None api_calls_limit = self.api_calls_limit - storage_limit_gb: Union[None, int] + storage_limit_gb: int | None storage_limit_gb = self.storage_limit_gb field_dict: dict[str, Any] = {} @@ -103,24 +105,24 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: projected_monthly_api_calls = d.pop("projected_monthly_api_calls") - def _parse_credits_limit(data: object) -> Union[None, int]: + def _parse_credits_limit(data: object) -> int | None: if data is None: return data - return cast(Union[None, int], data) + return cast(int | None, data) credits_limit = _parse_credits_limit(d.pop("credits_limit")) - def _parse_api_calls_limit(data: object) -> Union[None, int]: + def _parse_api_calls_limit(data: object) -> int | None: if data is None: return data - return cast(Union[None, 
int], data) + return cast(int | None, data) api_calls_limit = _parse_api_calls_limit(d.pop("api_calls_limit")) - def _parse_storage_limit_gb(data: object) -> Union[None, int]: + def _parse_storage_limit_gb(data: object) -> int | None: if data is None: return data - return cast(Union[None, int], data) + return cast(int | None, data) storage_limit_gb = _parse_storage_limit_gb(d.pop("storage_limit_gb")) diff --git a/robosystems_client/models/password_check_request.py b/robosystems_client/models/password_check_request.py index 88591bb..5598312 100644 --- a/robosystems_client/models/password_check_request.py +++ b/robosystems_client/models/password_check_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -15,17 +17,17 @@ class PasswordCheckRequest: Attributes: password (str): Password to check - email (Union[None, Unset, str]): User email for personalization checks + email (None | str | Unset): User email for personalization checks """ password: str - email: Union[None, Unset, str] = UNSET + email: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: password = self.password - email: Union[None, Unset, str] + email: None | str | Unset if isinstance(self.email, Unset): email = UNSET else: @@ -48,12 +50,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) password = d.pop("password") - def _parse_email(data: object) -> Union[None, Unset, str]: + def _parse_email(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) email = _parse_email(d.pop("email", UNSET)) diff --git 
a/robosystems_client/models/password_check_response.py b/robosystems_client/models/password_check_response.py index 3ef0af0..f931e15 100644 --- a/robosystems_client/models/password_check_response.py +++ b/robosystems_client/models/password_check_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar, cast @@ -31,7 +33,7 @@ class PasswordCheckResponse: score: int errors: list[str] suggestions: list[str] - character_types: "PasswordCheckResponseCharacterTypes" + character_types: PasswordCheckResponseCharacterTypes additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/password_check_response_character_types.py b/robosystems_client/models/password_check_response_character_types.py index 7fbfb71..187c030 100644 --- a/robosystems_client/models/password_check_response_character_types.py +++ b/robosystems_client/models/password_check_response_character_types.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/password_policy_response.py b/robosystems_client/models/password_policy_response.py index d00e702..8f36ac8 100644 --- a/robosystems_client/models/password_policy_response.py +++ b/robosystems_client/models/password_policy_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -19,7 +21,7 @@ class PasswordPolicyResponse: policy (PasswordPolicyResponsePolicy): Current password policy requirements """ - policy: "PasswordPolicyResponsePolicy" + policy: PasswordPolicyResponsePolicy additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/password_policy_response_policy.py 
b/robosystems_client/models/password_policy_response_policy.py index eecc118..ff0c205 100644 --- a/robosystems_client/models/password_policy_response_policy.py +++ b/robosystems_client/models/password_policy_response_policy.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/payment_method.py b/robosystems_client/models/payment_method.py index 488d87f..deecc4f 100644 --- a/robosystems_client/models/payment_method.py +++ b/robosystems_client/models/payment_method.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -17,19 +19,19 @@ class PaymentMethod: id (str): Payment method ID type_ (str): Payment method type (card, bank_account, etc.) is_default (bool): Whether this is the default payment method - brand (Union[None, Unset, str]): Card brand (visa, mastercard, etc.) - last4 (Union[None, Unset, str]): Last 4 digits of the card or account number - exp_month (Union[None, Unset, int]): Expiration month of the card - exp_year (Union[None, Unset, int]): Expiration year of the card + brand (None | str | Unset): Card brand (visa, mastercard, etc.) 
+ last4 (None | str | Unset): Last 4 digits of the card or account number + exp_month (int | None | Unset): Expiration month of the card + exp_year (int | None | Unset): Expiration year of the card """ id: str type_: str is_default: bool - brand: Union[None, Unset, str] = UNSET - last4: Union[None, Unset, str] = UNSET - exp_month: Union[None, Unset, int] = UNSET - exp_year: Union[None, Unset, int] = UNSET + brand: None | str | Unset = UNSET + last4: None | str | Unset = UNSET + exp_month: int | None | Unset = UNSET + exp_year: int | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -39,25 +41,25 @@ def to_dict(self) -> dict[str, Any]: is_default = self.is_default - brand: Union[None, Unset, str] + brand: None | str | Unset if isinstance(self.brand, Unset): brand = UNSET else: brand = self.brand - last4: Union[None, Unset, str] + last4: None | str | Unset if isinstance(self.last4, Unset): last4 = UNSET else: last4 = self.last4 - exp_month: Union[None, Unset, int] + exp_month: int | None | Unset if isinstance(self.exp_month, Unset): exp_month = UNSET else: exp_month = self.exp_month - exp_year: Union[None, Unset, int] + exp_year: int | None | Unset if isinstance(self.exp_year, Unset): exp_year = UNSET else: @@ -92,39 +94,39 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: is_default = d.pop("is_default") - def _parse_brand(data: object) -> Union[None, Unset, str]: + def _parse_brand(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) brand = _parse_brand(d.pop("brand", UNSET)) - def _parse_last4(data: object) -> Union[None, Unset, str]: + def _parse_last4(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | 
str | Unset, data) last4 = _parse_last4(d.pop("last4", UNSET)) - def _parse_exp_month(data: object) -> Union[None, Unset, int]: + def _parse_exp_month(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) exp_month = _parse_exp_month(d.pop("exp_month", UNSET)) - def _parse_exp_year(data: object) -> Union[None, Unset, int]: + def _parse_exp_year(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) exp_year = _parse_exp_year(d.pop("exp_year", UNSET)) diff --git a/robosystems_client/models/performance_insights.py b/robosystems_client/models/performance_insights.py index b5efefe..e6b92a9 100644 --- a/robosystems_client/models/performance_insights.py +++ b/robosystems_client/models/performance_insights.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -24,14 +26,14 @@ class PerformanceInsights: analysis_period_days (int): Analysis period in days total_operations (int): Total operations analyzed operation_stats (PerformanceInsightsOperationStats): Performance stats by operation type - slow_queries (list['PerformanceInsightsSlowQueriesItem']): Top slow queries (over 5 seconds) + slow_queries (list[PerformanceInsightsSlowQueriesItem]): Top slow queries (over 5 seconds) performance_score (int): Performance score (0-100) """ analysis_period_days: int total_operations: int - operation_stats: "PerformanceInsightsOperationStats" - slow_queries: list["PerformanceInsightsSlowQueriesItem"] + operation_stats: PerformanceInsightsOperationStats + slow_queries: list[PerformanceInsightsSlowQueriesItem] performance_score: int additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git 
a/robosystems_client/models/performance_insights_operation_stats.py b/robosystems_client/models/performance_insights_operation_stats.py index fd28392..cfc6ae5 100644 --- a/robosystems_client/models/performance_insights_operation_stats.py +++ b/robosystems_client/models/performance_insights_operation_stats.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/performance_insights_slow_queries_item.py b/robosystems_client/models/performance_insights_slow_queries_item.py index b28db48..2a5e4ac 100644 --- a/robosystems_client/models/performance_insights_slow_queries_item.py +++ b/robosystems_client/models/performance_insights_slow_queries_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/plaid_connection_config.py b/robosystems_client/models/plaid_connection_config.py index c1d70e0..2922cac 100644 --- a/robosystems_client/models/plaid_connection_config.py +++ b/robosystems_client/models/plaid_connection_config.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -23,18 +25,18 @@ class PlaidConnectionConfig: """Plaid-specific connection configuration. 
Attributes: - public_token (Union[None, Unset, str]): Plaid public token for exchange - access_token (Union[None, Unset, str]): Plaid access token (set after exchange) - item_id (Union[None, Unset, str]): Plaid item ID - institution (Union['PlaidConnectionConfigInstitutionType0', None, Unset]): Institution information - accounts (Union[None, Unset, list['PlaidConnectionConfigAccountsType0Item']]): Connected accounts + public_token (None | str | Unset): Plaid public token for exchange + access_token (None | str | Unset): Plaid access token (set after exchange) + item_id (None | str | Unset): Plaid item ID + institution (None | PlaidConnectionConfigInstitutionType0 | Unset): Institution information + accounts (list[PlaidConnectionConfigAccountsType0Item] | None | Unset): Connected accounts """ - public_token: Union[None, Unset, str] = UNSET - access_token: Union[None, Unset, str] = UNSET - item_id: Union[None, Unset, str] = UNSET - institution: Union["PlaidConnectionConfigInstitutionType0", None, Unset] = UNSET - accounts: Union[None, Unset, list["PlaidConnectionConfigAccountsType0Item"]] = UNSET + public_token: None | str | Unset = UNSET + access_token: None | str | Unset = UNSET + item_id: None | str | Unset = UNSET + institution: None | PlaidConnectionConfigInstitutionType0 | Unset = UNSET + accounts: list[PlaidConnectionConfigAccountsType0Item] | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -42,25 +44,25 @@ def to_dict(self) -> dict[str, Any]: PlaidConnectionConfigInstitutionType0, ) - public_token: Union[None, Unset, str] + public_token: None | str | Unset if isinstance(self.public_token, Unset): public_token = UNSET else: public_token = self.public_token - access_token: Union[None, Unset, str] + access_token: None | str | Unset if isinstance(self.access_token, Unset): access_token = UNSET else: access_token = self.access_token - item_id: Union[None, Unset, str] + 
item_id: None | str | Unset if isinstance(self.item_id, Unset): item_id = UNSET else: item_id = self.item_id - institution: Union[None, Unset, dict[str, Any]] + institution: dict[str, Any] | None | Unset if isinstance(self.institution, Unset): institution = UNSET elif isinstance(self.institution, PlaidConnectionConfigInstitutionType0): @@ -68,7 +70,7 @@ def to_dict(self) -> dict[str, Any]: else: institution = self.institution - accounts: Union[None, Unset, list[dict[str, Any]]] + accounts: list[dict[str, Any]] | None | Unset if isinstance(self.accounts, Unset): accounts = UNSET elif isinstance(self.accounts, list): @@ -107,36 +109,36 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - def _parse_public_token(data: object) -> Union[None, Unset, str]: + def _parse_public_token(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) public_token = _parse_public_token(d.pop("public_token", UNSET)) - def _parse_access_token(data: object) -> Union[None, Unset, str]: + def _parse_access_token(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) access_token = _parse_access_token(d.pop("access_token", UNSET)) - def _parse_item_id(data: object) -> Union[None, Unset, str]: + def _parse_item_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) item_id = _parse_item_id(d.pop("item_id", UNSET)) def _parse_institution( data: object, - ) -> Union["PlaidConnectionConfigInstitutionType0", None, Unset]: + ) -> None | PlaidConnectionConfigInstitutionType0 | Unset: if data is None: return data if isinstance(data, Unset): @@ -147,15 +149,15 @@ 
def _parse_institution( institution_type_0 = PlaidConnectionConfigInstitutionType0.from_dict(data) return institution_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["PlaidConnectionConfigInstitutionType0", None, Unset], data) + return cast(None | PlaidConnectionConfigInstitutionType0 | Unset, data) institution = _parse_institution(d.pop("institution", UNSET)) def _parse_accounts( data: object, - ) -> Union[None, Unset, list["PlaidConnectionConfigAccountsType0Item"]]: + ) -> list[PlaidConnectionConfigAccountsType0Item] | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -173,11 +175,9 @@ def _parse_accounts( accounts_type_0.append(accounts_type_0_item) return accounts_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast( - Union[None, Unset, list["PlaidConnectionConfigAccountsType0Item"]], data - ) + return cast(list[PlaidConnectionConfigAccountsType0Item] | None | Unset, data) accounts = _parse_accounts(d.pop("accounts", UNSET)) diff --git a/robosystems_client/models/plaid_connection_config_accounts_type_0_item.py b/robosystems_client/models/plaid_connection_config_accounts_type_0_item.py index f95a238..a98cbb5 100644 --- a/robosystems_client/models/plaid_connection_config_accounts_type_0_item.py +++ b/robosystems_client/models/plaid_connection_config_accounts_type_0_item.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/plaid_connection_config_institution_type_0.py b/robosystems_client/models/plaid_connection_config_institution_type_0.py index 3ccffbd..15953cb 100644 --- a/robosystems_client/models/plaid_connection_config_institution_type_0.py +++ b/robosystems_client/models/plaid_connection_config_institution_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import 
Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/portal_session_response.py b/robosystems_client/models/portal_session_response.py index 683ba57..105dfe7 100644 --- a/robosystems_client/models/portal_session_response.py +++ b/robosystems_client/models/portal_session_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/query_limits.py b/robosystems_client/models/query_limits.py index 1cbaf71..7c11e71 100644 --- a/robosystems_client/models/query_limits.py +++ b/robosystems_client/models/query_limits.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/quick_books_connection_config.py b/robosystems_client/models/quick_books_connection_config.py index 544f832..0da94c6 100644 --- a/robosystems_client/models/quick_books_connection_config.py +++ b/robosystems_client/models/quick_books_connection_config.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,22 +16,22 @@ class QuickBooksConnectionConfig: """QuickBooks-specific connection configuration. 
Attributes: - realm_id (Union[None, Unset, str]): QuickBooks Realm ID - refresh_token (Union[None, Unset, str]): OAuth refresh token + realm_id (None | str | Unset): QuickBooks Realm ID + refresh_token (None | str | Unset): OAuth refresh token """ - realm_id: Union[None, Unset, str] = UNSET - refresh_token: Union[None, Unset, str] = UNSET + realm_id: None | str | Unset = UNSET + refresh_token: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: - realm_id: Union[None, Unset, str] + realm_id: None | str | Unset if isinstance(self.realm_id, Unset): realm_id = UNSET else: realm_id = self.realm_id - refresh_token: Union[None, Unset, str] + refresh_token: None | str | Unset if isinstance(self.refresh_token, Unset): refresh_token = UNSET else: @@ -49,21 +51,21 @@ def to_dict(self) -> dict[str, Any]: def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - def _parse_realm_id(data: object) -> Union[None, Unset, str]: + def _parse_realm_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) realm_id = _parse_realm_id(d.pop("realm_id", UNSET)) - def _parse_refresh_token(data: object) -> Union[None, Unset, str]: + def _parse_refresh_token(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) refresh_token = _parse_refresh_token(d.pop("refresh_token", UNSET)) diff --git a/robosystems_client/models/rate_limits.py b/robosystems_client/models/rate_limits.py index 643b352..341b8fe 100644 --- a/robosystems_client/models/rate_limits.py +++ b/robosystems_client/models/rate_limits.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, 
TypeVar diff --git a/robosystems_client/models/register_request.py b/robosystems_client/models/register_request.py index 4cc97c2..6cebfe8 100644 --- a/robosystems_client/models/register_request.py +++ b/robosystems_client/models/register_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -17,13 +19,13 @@ class RegisterRequest: name (str): User's display name email (str): User's email address password (str): User's password (must meet security requirements) - captcha_token (Union[None, Unset, str]): CAPTCHA verification token (required in production) + captcha_token (None | str | Unset): CAPTCHA verification token (required in production) """ name: str email: str password: str - captcha_token: Union[None, Unset, str] = UNSET + captcha_token: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -33,7 +35,7 @@ def to_dict(self) -> dict[str, Any]: password = self.password - captcha_token: Union[None, Unset, str] + captcha_token: None | str | Unset if isinstance(self.captcha_token, Unset): captcha_token = UNSET else: @@ -62,12 +64,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: password = d.pop("password") - def _parse_captcha_token(data: object) -> Union[None, Unset, str]: + def _parse_captcha_token(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) captcha_token = _parse_captcha_token(d.pop("captcha_token", UNSET)) diff --git a/robosystems_client/models/repository_info.py b/robosystems_client/models/repository_info.py index c6c8bd0..0482410 100644 --- a/robosystems_client/models/repository_info.py +++ 
b/robosystems_client/models/repository_info.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -21,7 +23,7 @@ class RepositoryInfo: description (str): Repository description enabled (bool): Whether repository is enabled coming_soon (bool): Whether repository is coming soon - plans (list['OfferingRepositoryPlan']): Available plans + plans (list[OfferingRepositoryPlan]): Available plans """ type_: str @@ -29,7 +31,7 @@ class RepositoryInfo: description: str enabled: bool coming_soon: bool - plans: list["OfferingRepositoryPlan"] + plans: list[OfferingRepositoryPlan] additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/repository_subscriptions.py b/robosystems_client/models/repository_subscriptions.py index 4edaa3d..128306f 100644 --- a/robosystems_client/models/repository_subscriptions.py +++ b/robosystems_client/models/repository_subscriptions.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar, cast @@ -21,13 +23,13 @@ class RepositorySubscriptions: Attributes: description (str): Description of repository subscriptions pricing_model (str): Pricing model type (per_graph or per_organization) - repositories (list['RepositoryInfo']): Available repositories + repositories (list[RepositoryInfo]): Available repositories notes (list[str]): Important notes """ description: str pricing_model: str - repositories: list["RepositoryInfo"] + repositories: list[RepositoryInfo] notes: list[str] additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/resend_verification_email_response_resendverificationemail.py b/robosystems_client/models/resend_verification_email_response_resendverificationemail.py index 53dc8b6..eb754a5 100644 --- 
a/robosystems_client/models/resend_verification_email_response_resendverificationemail.py +++ b/robosystems_client/models/resend_verification_email_response_resendverificationemail.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/reset_password_request.py b/robosystems_client/models/reset_password_request.py index cedc67f..a3a583c 100644 --- a/robosystems_client/models/reset_password_request.py +++ b/robosystems_client/models/reset_password_request.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/reset_password_validate_response.py b/robosystems_client/models/reset_password_validate_response.py index c813c63..0249b87 100644 --- a/robosystems_client/models/reset_password_validate_response.py +++ b/robosystems_client/models/reset_password_validate_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -15,17 +17,17 @@ class ResetPasswordValidateResponse: Attributes: valid (bool): Whether the token is valid - email (Union[None, Unset, str]): Masked email address if token is valid + email (None | str | Unset): Masked email address if token is valid """ valid: bool - email: Union[None, Unset, str] = UNSET + email: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: valid = self.valid - email: Union[None, Unset, str] + email: None | str | Unset if isinstance(self.email, Unset): email = UNSET else: @@ -48,12 +50,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) valid = d.pop("valid") - def 
_parse_email(data: object) -> Union[None, Unset, str]: + def _parse_email(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) email = _parse_email(d.pop("email", UNSET)) diff --git a/robosystems_client/models/save_view_request.py b/robosystems_client/models/save_view_request.py new file mode 100644 index 0000000..3a09f83 --- /dev/null +++ b/robosystems_client/models/save_view_request.py @@ -0,0 +1,138 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="SaveViewRequest") + + +@_attrs_define +class SaveViewRequest: + """ + Attributes: + report_type (str): Type of report (e.g., 'Annual Report', 'Quarterly Report', '10-K') + period_start (str): Period start date (YYYY-MM-DD) + period_end (str): Period end date (YYYY-MM-DD) + report_id (None | str | Unset): Existing report ID to update (if provided, deletes existing facts/structures and + creates new ones) + entity_id (None | str | Unset): Entity identifier (defaults to primary entity) + include_presentation (bool | Unset): Create presentation structures Default: True. + include_calculation (bool | Unset): Create calculation structures Default: True. 
+ """ + + report_type: str + period_start: str + period_end: str + report_id: None | str | Unset = UNSET + entity_id: None | str | Unset = UNSET + include_presentation: bool | Unset = True + include_calculation: bool | Unset = True + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + report_type = self.report_type + + period_start = self.period_start + + period_end = self.period_end + + report_id: None | str | Unset + if isinstance(self.report_id, Unset): + report_id = UNSET + else: + report_id = self.report_id + + entity_id: None | str | Unset + if isinstance(self.entity_id, Unset): + entity_id = UNSET + else: + entity_id = self.entity_id + + include_presentation = self.include_presentation + + include_calculation = self.include_calculation + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "report_type": report_type, + "period_start": period_start, + "period_end": period_end, + } + ) + if report_id is not UNSET: + field_dict["report_id"] = report_id + if entity_id is not UNSET: + field_dict["entity_id"] = entity_id + if include_presentation is not UNSET: + field_dict["include_presentation"] = include_presentation + if include_calculation is not UNSET: + field_dict["include_calculation"] = include_calculation + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + report_type = d.pop("report_type") + + period_start = d.pop("period_start") + + period_end = d.pop("period_end") + + def _parse_report_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + report_id = _parse_report_id(d.pop("report_id", UNSET)) + + def _parse_entity_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | 
str | Unset, data) + + entity_id = _parse_entity_id(d.pop("entity_id", UNSET)) + + include_presentation = d.pop("include_presentation", UNSET) + + include_calculation = d.pop("include_calculation", UNSET) + + save_view_request = cls( + report_type=report_type, + period_start=period_start, + period_end=period_end, + report_id=report_id, + entity_id=entity_id, + include_presentation=include_presentation, + include_calculation=include_calculation, + ) + + save_view_request.additional_properties = d + return save_view_request + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/save_view_response.py b/robosystems_client/models/save_view_response.py new file mode 100644 index 0000000..1be21b6 --- /dev/null +++ b/robosystems_client/models/save_view_response.py @@ -0,0 +1,181 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.fact_detail import FactDetail + from ..models.structure_detail import StructureDetail + + +T = TypeVar("T", bound="SaveViewResponse") + + +@_attrs_define +class SaveViewResponse: + """ + Attributes: + report_id (str): Unique report identifier (used as parquet export prefix) + report_type (str): + entity_id (str): + entity_name (str): + period_start (str): + period_end (str): + fact_count (int): + presentation_count (int): + calculation_count (int): + facts (list[FactDetail]): + structures (list[StructureDetail]): + 
created_at (str): + parquet_export_prefix (str): Prefix for parquet file exports + """ + + report_id: str + report_type: str + entity_id: str + entity_name: str + period_start: str + period_end: str + fact_count: int + presentation_count: int + calculation_count: int + facts: list[FactDetail] + structures: list[StructureDetail] + created_at: str + parquet_export_prefix: str + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + report_id = self.report_id + + report_type = self.report_type + + entity_id = self.entity_id + + entity_name = self.entity_name + + period_start = self.period_start + + period_end = self.period_end + + fact_count = self.fact_count + + presentation_count = self.presentation_count + + calculation_count = self.calculation_count + + facts = [] + for facts_item_data in self.facts: + facts_item = facts_item_data.to_dict() + facts.append(facts_item) + + structures = [] + for structures_item_data in self.structures: + structures_item = structures_item_data.to_dict() + structures.append(structures_item) + + created_at = self.created_at + + parquet_export_prefix = self.parquet_export_prefix + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "report_id": report_id, + "report_type": report_type, + "entity_id": entity_id, + "entity_name": entity_name, + "period_start": period_start, + "period_end": period_end, + "fact_count": fact_count, + "presentation_count": presentation_count, + "calculation_count": calculation_count, + "facts": facts, + "structures": structures, + "created_at": created_at, + "parquet_export_prefix": parquet_export_prefix, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.fact_detail import FactDetail + from ..models.structure_detail import StructureDetail + + d = dict(src_dict) + report_id = d.pop("report_id") + + report_type = 
d.pop("report_type") + + entity_id = d.pop("entity_id") + + entity_name = d.pop("entity_name") + + period_start = d.pop("period_start") + + period_end = d.pop("period_end") + + fact_count = d.pop("fact_count") + + presentation_count = d.pop("presentation_count") + + calculation_count = d.pop("calculation_count") + + facts = [] + _facts = d.pop("facts") + for facts_item_data in _facts: + facts_item = FactDetail.from_dict(facts_item_data) + + facts.append(facts_item) + + structures = [] + _structures = d.pop("structures") + for structures_item_data in _structures: + structures_item = StructureDetail.from_dict(structures_item_data) + + structures.append(structures_item) + + created_at = d.pop("created_at") + + parquet_export_prefix = d.pop("parquet_export_prefix") + + save_view_response = cls( + report_id=report_id, + report_type=report_type, + entity_id=entity_id, + entity_name=entity_name, + period_start=period_start, + period_end=period_end, + fact_count=fact_count, + presentation_count=presentation_count, + calculation_count=calculation_count, + facts=facts, + structures=structures, + created_at=created_at, + parquet_export_prefix=parquet_export_prefix, + ) + + save_view_response.additional_properties = d + return save_view_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/schema_export_response.py b/robosystems_client/models/schema_export_response.py index da700c2..461c094 100644 --- a/robosystems_client/models/schema_export_response.py +++ b/robosystems_client/models/schema_export_response.py @@ -1,5 +1,7 @@ 
+from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -24,19 +26,19 @@ class SchemaExportResponse: Attributes: graph_id (str): Graph ID - schema_definition (Union['SchemaExportResponseSchemaDefinitionType0', str]): Exported schema definition (format - depends on 'format' parameter) + schema_definition (SchemaExportResponseSchemaDefinitionType0 | str): Exported schema definition (format depends + on 'format' parameter) format_ (str): Export format used exported_at (str): Export timestamp - data_stats (Union['SchemaExportResponseDataStatsType0', None, Unset]): Data statistics if requested (only when + data_stats (None | SchemaExportResponseDataStatsType0 | Unset): Data statistics if requested (only when include_data_stats=true) """ graph_id: str - schema_definition: Union["SchemaExportResponseSchemaDefinitionType0", str] + schema_definition: SchemaExportResponseSchemaDefinitionType0 | str format_: str exported_at: str - data_stats: Union["SchemaExportResponseDataStatsType0", None, Unset] = UNSET + data_stats: None | SchemaExportResponseDataStatsType0 | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -49,7 +51,7 @@ def to_dict(self) -> dict[str, Any]: graph_id = self.graph_id - schema_definition: Union[dict[str, Any], str] + schema_definition: dict[str, Any] | str if isinstance(self.schema_definition, SchemaExportResponseSchemaDefinitionType0): schema_definition = self.schema_definition.to_dict() else: @@ -59,7 +61,7 @@ def to_dict(self) -> dict[str, Any]: exported_at = self.exported_at - data_stats: Union[None, Unset, dict[str, Any]] + data_stats: dict[str, Any] | None | Unset if isinstance(self.data_stats, Unset): data_stats = UNSET elif isinstance(self.data_stats, 
SchemaExportResponseDataStatsType0): @@ -96,7 +98,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: def _parse_schema_definition( data: object, - ) -> Union["SchemaExportResponseSchemaDefinitionType0", str]: + ) -> SchemaExportResponseSchemaDefinitionType0 | str: try: if not isinstance(data, dict): raise TypeError() @@ -105,9 +107,9 @@ def _parse_schema_definition( ) return schema_definition_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["SchemaExportResponseSchemaDefinitionType0", str], data) + return cast(SchemaExportResponseSchemaDefinitionType0 | str, data) schema_definition = _parse_schema_definition(d.pop("schema_definition")) @@ -117,7 +119,7 @@ def _parse_schema_definition( def _parse_data_stats( data: object, - ) -> Union["SchemaExportResponseDataStatsType0", None, Unset]: + ) -> None | SchemaExportResponseDataStatsType0 | Unset: if data is None: return data if isinstance(data, Unset): @@ -128,9 +130,9 @@ def _parse_data_stats( data_stats_type_0 = SchemaExportResponseDataStatsType0.from_dict(data) return data_stats_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["SchemaExportResponseDataStatsType0", None, Unset], data) + return cast(None | SchemaExportResponseDataStatsType0 | Unset, data) data_stats = _parse_data_stats(d.pop("data_stats", UNSET)) diff --git a/robosystems_client/models/schema_export_response_data_stats_type_0.py b/robosystems_client/models/schema_export_response_data_stats_type_0.py index 3a2e329..41f9208 100644 --- a/robosystems_client/models/schema_export_response_data_stats_type_0.py +++ b/robosystems_client/models/schema_export_response_data_stats_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/schema_export_response_schema_definition_type_0.py 
b/robosystems_client/models/schema_export_response_schema_definition_type_0.py index fb2a29b..fde19fe 100644 --- a/robosystems_client/models/schema_export_response_schema_definition_type_0.py +++ b/robosystems_client/models/schema_export_response_schema_definition_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/schema_info_response.py b/robosystems_client/models/schema_info_response.py index 527217c..f6a5822 100644 --- a/robosystems_client/models/schema_info_response.py +++ b/robosystems_client/models/schema_info_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -24,7 +26,7 @@ class SchemaInfoResponse: """ graph_id: str - schema: "SchemaInfoResponseSchema" + schema: SchemaInfoResponseSchema additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/schema_info_response_schema.py b/robosystems_client/models/schema_info_response_schema.py index bf5551a..9003860 100644 --- a/robosystems_client/models/schema_info_response_schema.py +++ b/robosystems_client/models/schema_info_response_schema.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/schema_validation_request.py b/robosystems_client/models/schema_validation_request.py index 4ba7579..16f7c34 100644 --- a/robosystems_client/models/schema_validation_request.py +++ b/robosystems_client/models/schema_validation_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as 
_attrs_field @@ -20,16 +22,15 @@ class SchemaValidationRequest: """Request model for schema validation. Attributes: - schema_definition (Union['SchemaValidationRequestSchemaDefinitionType0', str]): Schema definition as JSON dict - or JSON/YAML string - format_ (Union[Unset, str]): Schema format: json, yaml, or dict Default: 'json'. - check_compatibility (Union[None, Unset, list[str]]): List of existing schema extensions to check compatibility - with + schema_definition (SchemaValidationRequestSchemaDefinitionType0 | str): Schema definition as JSON dict or + JSON/YAML string + format_ (str | Unset): Schema format: json, yaml, or dict Default: 'json'. + check_compatibility (list[str] | None | Unset): List of existing schema extensions to check compatibility with """ - schema_definition: Union["SchemaValidationRequestSchemaDefinitionType0", str] - format_: Union[Unset, str] = "json" - check_compatibility: Union[None, Unset, list[str]] = UNSET + schema_definition: SchemaValidationRequestSchemaDefinitionType0 | str + format_: str | Unset = "json" + check_compatibility: list[str] | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -37,7 +38,7 @@ def to_dict(self) -> dict[str, Any]: SchemaValidationRequestSchemaDefinitionType0, ) - schema_definition: Union[dict[str, Any], str] + schema_definition: dict[str, Any] | str if isinstance(self.schema_definition, SchemaValidationRequestSchemaDefinitionType0): schema_definition = self.schema_definition.to_dict() else: @@ -45,7 +46,7 @@ def to_dict(self) -> dict[str, Any]: format_ = self.format_ - check_compatibility: Union[None, Unset, list[str]] + check_compatibility: list[str] | None | Unset if isinstance(self.check_compatibility, Unset): check_compatibility = UNSET elif isinstance(self.check_compatibility, list): @@ -78,7 +79,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: def _parse_schema_definition( data: object, - ) 
-> Union["SchemaValidationRequestSchemaDefinitionType0", str]: + ) -> SchemaValidationRequestSchemaDefinitionType0 | str: try: if not isinstance(data, dict): raise TypeError() @@ -87,15 +88,15 @@ def _parse_schema_definition( ) return schema_definition_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["SchemaValidationRequestSchemaDefinitionType0", str], data) + return cast(SchemaValidationRequestSchemaDefinitionType0 | str, data) schema_definition = _parse_schema_definition(d.pop("schema_definition")) format_ = d.pop("format", UNSET) - def _parse_check_compatibility(data: object) -> Union[None, Unset, list[str]]: + def _parse_check_compatibility(data: object) -> list[str] | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -106,9 +107,9 @@ def _parse_check_compatibility(data: object) -> Union[None, Unset, list[str]]: check_compatibility_type_0 = cast(list[str], data) return check_compatibility_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, Unset, list[str]], data) + return cast(list[str] | None | Unset, data) check_compatibility = _parse_check_compatibility( d.pop("check_compatibility", UNSET) diff --git a/robosystems_client/models/schema_validation_request_schema_definition_type_0.py b/robosystems_client/models/schema_validation_request_schema_definition_type_0.py index 7184d02..79573fa 100644 --- a/robosystems_client/models/schema_validation_request_schema_definition_type_0.py +++ b/robosystems_client/models/schema_validation_request_schema_definition_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/schema_validation_response.py b/robosystems_client/models/schema_validation_response.py index 9dee137..1fac5b6 100644 --- a/robosystems_client/models/schema_validation_response.py +++ 
b/robosystems_client/models/schema_validation_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -25,23 +27,20 @@ class SchemaValidationResponse: Attributes: valid (bool): Whether the schema is valid message (str): Validation message - errors (Union[None, Unset, list[str]]): List of validation errors (only present when valid=false) - warnings (Union[None, Unset, list[str]]): List of validation warnings (schema is still valid but has potential + errors (list[str] | None | Unset): List of validation errors (only present when valid=false) + warnings (list[str] | None | Unset): List of validation warnings (schema is still valid but has potential issues) - stats (Union['SchemaValidationResponseStatsType0', None, Unset]): Schema statistics (only present when - valid=true) - compatibility (Union['SchemaValidationResponseCompatibilityType0', None, Unset]): Compatibility check results - (only when check_compatibility specified) + stats (None | SchemaValidationResponseStatsType0 | Unset): Schema statistics (only present when valid=true) + compatibility (None | SchemaValidationResponseCompatibilityType0 | Unset): Compatibility check results (only + when check_compatibility specified) """ valid: bool message: str - errors: Union[None, Unset, list[str]] = UNSET - warnings: Union[None, Unset, list[str]] = UNSET - stats: Union["SchemaValidationResponseStatsType0", None, Unset] = UNSET - compatibility: Union["SchemaValidationResponseCompatibilityType0", None, Unset] = ( - UNSET - ) + errors: list[str] | None | Unset = UNSET + warnings: list[str] | None | Unset = UNSET + stats: None | SchemaValidationResponseStatsType0 | Unset = UNSET + compatibility: None | SchemaValidationResponseCompatibilityType0 | Unset = UNSET 
additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -56,7 +55,7 @@ def to_dict(self) -> dict[str, Any]: message = self.message - errors: Union[None, Unset, list[str]] + errors: list[str] | None | Unset if isinstance(self.errors, Unset): errors = UNSET elif isinstance(self.errors, list): @@ -65,7 +64,7 @@ def to_dict(self) -> dict[str, Any]: else: errors = self.errors - warnings: Union[None, Unset, list[str]] + warnings: list[str] | None | Unset if isinstance(self.warnings, Unset): warnings = UNSET elif isinstance(self.warnings, list): @@ -74,7 +73,7 @@ def to_dict(self) -> dict[str, Any]: else: warnings = self.warnings - stats: Union[None, Unset, dict[str, Any]] + stats: dict[str, Any] | None | Unset if isinstance(self.stats, Unset): stats = UNSET elif isinstance(self.stats, SchemaValidationResponseStatsType0): @@ -82,7 +81,7 @@ def to_dict(self) -> dict[str, Any]: else: stats = self.stats - compatibility: Union[None, Unset, dict[str, Any]] + compatibility: dict[str, Any] | None | Unset if isinstance(self.compatibility, Unset): compatibility = UNSET elif isinstance(self.compatibility, SchemaValidationResponseCompatibilityType0): @@ -123,7 +122,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: message = d.pop("message") - def _parse_errors(data: object) -> Union[None, Unset, list[str]]: + def _parse_errors(data: object) -> list[str] | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -134,13 +133,13 @@ def _parse_errors(data: object) -> Union[None, Unset, list[str]]: errors_type_0 = cast(list[str], data) return errors_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, Unset, list[str]], data) + return cast(list[str] | None | Unset, data) errors = _parse_errors(d.pop("errors", UNSET)) - def _parse_warnings(data: object) -> Union[None, Unset, list[str]]: + def _parse_warnings(data: object) -> 
list[str] | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -151,15 +150,13 @@ def _parse_warnings(data: object) -> Union[None, Unset, list[str]]: warnings_type_0 = cast(list[str], data) return warnings_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, Unset, list[str]], data) + return cast(list[str] | None | Unset, data) warnings = _parse_warnings(d.pop("warnings", UNSET)) - def _parse_stats( - data: object, - ) -> Union["SchemaValidationResponseStatsType0", None, Unset]: + def _parse_stats(data: object) -> None | SchemaValidationResponseStatsType0 | Unset: if data is None: return data if isinstance(data, Unset): @@ -170,15 +167,15 @@ def _parse_stats( stats_type_0 = SchemaValidationResponseStatsType0.from_dict(data) return stats_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["SchemaValidationResponseStatsType0", None, Unset], data) + return cast(None | SchemaValidationResponseStatsType0 | Unset, data) stats = _parse_stats(d.pop("stats", UNSET)) def _parse_compatibility( data: object, - ) -> Union["SchemaValidationResponseCompatibilityType0", None, Unset]: + ) -> None | SchemaValidationResponseCompatibilityType0 | Unset: if data is None: return data if isinstance(data, Unset): @@ -191,11 +188,9 @@ def _parse_compatibility( ) return compatibility_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast( - Union["SchemaValidationResponseCompatibilityType0", None, Unset], data - ) + return cast(None | SchemaValidationResponseCompatibilityType0 | Unset, data) compatibility = _parse_compatibility(d.pop("compatibility", UNSET)) diff --git a/robosystems_client/models/schema_validation_response_compatibility_type_0.py b/robosystems_client/models/schema_validation_response_compatibility_type_0.py index 845259f..9db588e 100644 --- 
a/robosystems_client/models/schema_validation_response_compatibility_type_0.py +++ b/robosystems_client/models/schema_validation_response_compatibility_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/schema_validation_response_stats_type_0.py b/robosystems_client/models/schema_validation_response_stats_type_0.py index e855c8d..daafa2c 100644 --- a/robosystems_client/models/schema_validation_response_stats_type_0.py +++ b/robosystems_client/models/schema_validation_response_stats_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/sec_connection_config.py b/robosystems_client/models/sec_connection_config.py index fb52494..cea8f3f 100644 --- a/robosystems_client/models/sec_connection_config.py +++ b/robosystems_client/models/sec_connection_config.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -15,17 +17,17 @@ class SECConnectionConfig: Attributes: cik (str): 10-digit CIK number - entity_name (Union[None, Unset, str]): Entity name from SEC + entity_name (None | str | Unset): Entity name from SEC """ cik: str - entity_name: Union[None, Unset, str] = UNSET + entity_name: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: cik = self.cik - entity_name: Union[None, Unset, str] + entity_name: None | str | Unset if isinstance(self.entity_name, Unset): entity_name = UNSET else: @@ -48,12 +50,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) cik = d.pop("cik") - def _parse_entity_name(data: object) 
-> Union[None, Unset, str]: + def _parse_entity_name(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) entity_name = _parse_entity_name(d.pop("entity_name", UNSET)) diff --git a/robosystems_client/models/selection_criteria.py b/robosystems_client/models/selection_criteria.py index 9d19602..fc582b3 100644 --- a/robosystems_client/models/selection_criteria.py +++ b/robosystems_client/models/selection_criteria.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -15,28 +17,28 @@ class SelectionCriteria: """Criteria for agent selection. Attributes: - min_confidence (Union[Unset, float]): Minimum confidence score Default: 0.3. - required_capabilities (Union[Unset, list[str]]): Required agent capabilities - preferred_mode (Union[AgentMode, None, Unset]): Preferred execution mode - max_response_time (Union[Unset, float]): Maximum response time in seconds Default: 60.0. - excluded_agents (Union[Unset, list[str]]): Agents to exclude from selection + min_confidence (float | Unset): Minimum confidence score Default: 0.3. + required_capabilities (list[str] | Unset): Required agent capabilities + preferred_mode (AgentMode | None | Unset): Preferred execution mode + max_response_time (float | Unset): Maximum response time in seconds Default: 60.0. 
+ excluded_agents (list[str] | Unset): Agents to exclude from selection """ - min_confidence: Union[Unset, float] = 0.3 - required_capabilities: Union[Unset, list[str]] = UNSET - preferred_mode: Union[AgentMode, None, Unset] = UNSET - max_response_time: Union[Unset, float] = 60.0 - excluded_agents: Union[Unset, list[str]] = UNSET + min_confidence: float | Unset = 0.3 + required_capabilities: list[str] | Unset = UNSET + preferred_mode: AgentMode | None | Unset = UNSET + max_response_time: float | Unset = 60.0 + excluded_agents: list[str] | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: min_confidence = self.min_confidence - required_capabilities: Union[Unset, list[str]] = UNSET + required_capabilities: list[str] | Unset = UNSET if not isinstance(self.required_capabilities, Unset): required_capabilities = self.required_capabilities - preferred_mode: Union[None, Unset, str] + preferred_mode: None | str | Unset if isinstance(self.preferred_mode, Unset): preferred_mode = UNSET elif isinstance(self.preferred_mode, AgentMode): @@ -46,7 +48,7 @@ def to_dict(self) -> dict[str, Any]: max_response_time = self.max_response_time - excluded_agents: Union[Unset, list[str]] = UNSET + excluded_agents: list[str] | Unset = UNSET if not isinstance(self.excluded_agents, Unset): excluded_agents = self.excluded_agents @@ -73,7 +75,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: required_capabilities = cast(list[str], d.pop("required_capabilities", UNSET)) - def _parse_preferred_mode(data: object) -> Union[AgentMode, None, Unset]: + def _parse_preferred_mode(data: object) -> AgentMode | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -84,9 +86,9 @@ def _parse_preferred_mode(data: object) -> Union[AgentMode, None, Unset]: preferred_mode_type_0 = AgentMode(data) return preferred_mode_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, 
KeyError): pass - return cast(Union[AgentMode, None, Unset], data) + return cast(AgentMode | None | Unset, data) preferred_mode = _parse_preferred_mode(d.pop("preferred_mode", UNSET)) diff --git a/robosystems_client/models/service_offering_summary.py b/robosystems_client/models/service_offering_summary.py index 7d9ac8f..8181be4 100644 --- a/robosystems_client/models/service_offering_summary.py +++ b/robosystems_client/models/service_offering_summary.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/service_offerings_response.py b/robosystems_client/models/service_offerings_response.py index 6d926f4..928bd0d 100644 --- a/robosystems_client/models/service_offerings_response.py +++ b/robosystems_client/models/service_offerings_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -34,10 +36,10 @@ class ServiceOfferingsResponse: """ billing_enabled: bool - graph_subscriptions: "GraphSubscriptions" - repository_subscriptions: "RepositorySubscriptions" - operation_costs: "OperationCosts" - summary: "ServiceOfferingSummary" + graph_subscriptions: GraphSubscriptions + repository_subscriptions: RepositorySubscriptions + operation_costs: OperationCosts + summary: ServiceOfferingSummary additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/sso_complete_request.py b/robosystems_client/models/sso_complete_request.py index a2ef8fa..eaab89f 100644 --- a/robosystems_client/models/sso_complete_request.py +++ b/robosystems_client/models/sso_complete_request.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/sso_exchange_request.py 
b/robosystems_client/models/sso_exchange_request.py index 0457b72..ecd8798 100644 --- a/robosystems_client/models/sso_exchange_request.py +++ b/robosystems_client/models/sso_exchange_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,12 +18,12 @@ class SSOExchangeRequest: Attributes: token (str): Temporary SSO token target_app (str): Target application identifier - return_url (Union[None, Unset, str]): Optional return URL after authentication + return_url (None | str | Unset): Optional return URL after authentication """ token: str target_app: str - return_url: Union[None, Unset, str] = UNSET + return_url: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -29,7 +31,7 @@ def to_dict(self) -> dict[str, Any]: target_app = self.target_app - return_url: Union[None, Unset, str] + return_url: None | str | Unset if isinstance(self.return_url, Unset): return_url = UNSET else: @@ -55,12 +57,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: target_app = d.pop("target_app") - def _parse_return_url(data: object) -> Union[None, Unset, str]: + def _parse_return_url(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) return_url = _parse_return_url(d.pop("return_url", UNSET)) diff --git a/robosystems_client/models/sso_exchange_response.py b/robosystems_client/models/sso_exchange_response.py index eea4b66..28eac5a 100644 --- a/robosystems_client/models/sso_exchange_response.py +++ b/robosystems_client/models/sso_exchange_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime from 
collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/sso_token_response.py b/robosystems_client/models/sso_token_response.py index 40c06aa..8c1e427 100644 --- a/robosystems_client/models/sso_token_response.py +++ b/robosystems_client/models/sso_token_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping from typing import Any, TypeVar, cast diff --git a/robosystems_client/models/storage_info.py b/robosystems_client/models/storage_info.py index b150e28..801c6e0 100644 --- a/robosystems_client/models/storage_info.py +++ b/robosystems_client/models/storage_info.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -21,8 +23,8 @@ class StorageInfo: overage_pricing (StorageInfoOveragePricing): Overage pricing per GB per tier """ - included_per_tier: "StorageInfoIncludedPerTier" - overage_pricing: "StorageInfoOveragePricing" + included_per_tier: StorageInfoIncludedPerTier + overage_pricing: StorageInfoOveragePricing additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: diff --git a/robosystems_client/models/storage_info_included_per_tier.py b/robosystems_client/models/storage_info_included_per_tier.py index d2745bc..cd081aa 100644 --- a/robosystems_client/models/storage_info_included_per_tier.py +++ b/robosystems_client/models/storage_info_included_per_tier.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/storage_info_overage_pricing.py b/robosystems_client/models/storage_info_overage_pricing.py index 5d25dc0..294eb4f 100644 --- a/robosystems_client/models/storage_info_overage_pricing.py +++ b/robosystems_client/models/storage_info_overage_pricing.py @@ -1,3 +1,5 @@ +from __future__ import 
annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/storage_limit_response.py b/robosystems_client/models/storage_limit_response.py index 20cd6a8..31ec32f 100644 --- a/robosystems_client/models/storage_limit_response.py +++ b/robosystems_client/models/storage_limit_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -22,7 +24,7 @@ class StorageLimitResponse: approaching_limit (bool): needs_warning (bool): has_override (bool): - recommendations (Union[None, Unset, list[str]]): + recommendations (list[str] | None | Unset): """ graph_id: str @@ -33,7 +35,7 @@ class StorageLimitResponse: approaching_limit: bool needs_warning: bool has_override: bool - recommendations: Union[None, Unset, list[str]] = UNSET + recommendations: list[str] | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -53,7 +55,7 @@ def to_dict(self) -> dict[str, Any]: has_override = self.has_override - recommendations: Union[None, Unset, list[str]] + recommendations: list[str] | None | Unset if isinstance(self.recommendations, Unset): recommendations = UNSET elif isinstance(self.recommendations, list): @@ -100,7 +102,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: has_override = d.pop("has_override") - def _parse_recommendations(data: object) -> Union[None, Unset, list[str]]: + def _parse_recommendations(data: object) -> list[str] | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -111,9 +113,9 @@ def _parse_recommendations(data: object) -> Union[None, Unset, list[str]]: recommendations_type_0 = cast(list[str], data) return recommendations_type_0 - except: # noqa: E722 + except 
(TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, Unset, list[str]], data) + return cast(list[str] | None | Unset, data) recommendations = _parse_recommendations(d.pop("recommendations", UNSET)) diff --git a/robosystems_client/models/storage_limits.py b/robosystems_client/models/storage_limits.py index 30c9c41..6b67514 100644 --- a/robosystems_client/models/storage_limits.py +++ b/robosystems_client/models/storage_limits.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,12 +18,12 @@ class StorageLimits: Attributes: max_storage_gb (float): Maximum storage limit in GB approaching_limit (bool): Whether approaching storage limit (>80%) - current_usage_gb (Union[None, Unset, float]): Current storage usage in GB + current_usage_gb (float | None | Unset): Current storage usage in GB """ max_storage_gb: float approaching_limit: bool - current_usage_gb: Union[None, Unset, float] = UNSET + current_usage_gb: float | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -29,7 +31,7 @@ def to_dict(self) -> dict[str, Any]: approaching_limit = self.approaching_limit - current_usage_gb: Union[None, Unset, float] + current_usage_gb: float | None | Unset if isinstance(self.current_usage_gb, Unset): current_usage_gb = UNSET else: @@ -55,12 +57,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: approaching_limit = d.pop("approaching_limit") - def _parse_current_usage_gb(data: object) -> Union[None, Unset, float]: + def _parse_current_usage_gb(data: object) -> float | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) 
current_usage_gb = _parse_current_usage_gb(d.pop("current_usage_gb", UNSET)) diff --git a/robosystems_client/models/storage_summary.py b/robosystems_client/models/storage_summary.py index bd5dfa3..a0a9c06 100644 --- a/robosystems_client/models/storage_summary.py +++ b/robosystems_client/models/storage_summary.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/structure_detail.py b/robosystems_client/models/structure_detail.py new file mode 100644 index 0000000..2a3a1da --- /dev/null +++ b/robosystems_client/models/structure_detail.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="StructureDetail") + + +@_attrs_define +class StructureDetail: + """ + Attributes: + structure_id (str): + structure_type (str): + name (str): + element_count (int): + """ + + structure_id: str + structure_type: str + name: str + element_count: int + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + structure_id = self.structure_id + + structure_type = self.structure_type + + name = self.name + + element_count = self.element_count + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "structure_id": structure_id, + "structure_type": structure_type, + "name": name, + "element_count": element_count, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + structure_id = d.pop("structure_id") + + structure_type = d.pop("structure_type") + + name = d.pop("name") + + element_count = d.pop("element_count") + + structure_detail = cls( + structure_id=structure_id, + structure_type=structure_type, 
+ name=name, + element_count=element_count, + ) + + structure_detail.additional_properties = d + return structure_detail + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/subgraph_quota_response.py b/robosystems_client/models/subgraph_quota_response.py index 9a35baf..1e5c241 100644 --- a/robosystems_client/models/subgraph_quota_response.py +++ b/robosystems_client/models/subgraph_quota_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -17,19 +19,19 @@ class SubgraphQuotaResponse: parent_graph_id (str): Parent graph identifier tier (str): Graph tier current_count (int): Current number of subgraphs - max_allowed (Union[None, Unset, int]): Maximum allowed subgraphs (None = unlimited) - remaining (Union[None, Unset, int]): Remaining subgraphs that can be created - total_size_mb (Union[None, Unset, float]): Total size of all subgraphs - max_size_mb (Union[None, Unset, float]): Maximum allowed total size + max_allowed (int | None | Unset): Maximum allowed subgraphs (None = unlimited) + remaining (int | None | Unset): Remaining subgraphs that can be created + total_size_mb (float | None | Unset): Total size of all subgraphs + max_size_mb (float | None | Unset): Maximum allowed total size """ parent_graph_id: str tier: str current_count: int - max_allowed: Union[None, Unset, int] = UNSET - remaining: Union[None, 
Unset, int] = UNSET - total_size_mb: Union[None, Unset, float] = UNSET - max_size_mb: Union[None, Unset, float] = UNSET + max_allowed: int | None | Unset = UNSET + remaining: int | None | Unset = UNSET + total_size_mb: float | None | Unset = UNSET + max_size_mb: float | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -39,25 +41,25 @@ def to_dict(self) -> dict[str, Any]: current_count = self.current_count - max_allowed: Union[None, Unset, int] + max_allowed: int | None | Unset if isinstance(self.max_allowed, Unset): max_allowed = UNSET else: max_allowed = self.max_allowed - remaining: Union[None, Unset, int] + remaining: int | None | Unset if isinstance(self.remaining, Unset): remaining = UNSET else: remaining = self.remaining - total_size_mb: Union[None, Unset, float] + total_size_mb: float | None | Unset if isinstance(self.total_size_mb, Unset): total_size_mb = UNSET else: total_size_mb = self.total_size_mb - max_size_mb: Union[None, Unset, float] + max_size_mb: float | None | Unset if isinstance(self.max_size_mb, Unset): max_size_mb = UNSET else: @@ -92,39 +94,39 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: current_count = d.pop("current_count") - def _parse_max_allowed(data: object) -> Union[None, Unset, int]: + def _parse_max_allowed(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) max_allowed = _parse_max_allowed(d.pop("max_allowed", UNSET)) - def _parse_remaining(data: object) -> Union[None, Unset, int]: + def _parse_remaining(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) remaining = _parse_remaining(d.pop("remaining", UNSET)) - def _parse_total_size_mb(data: object) 
-> Union[None, Unset, float]: + def _parse_total_size_mb(data: object) -> float | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) total_size_mb = _parse_total_size_mb(d.pop("total_size_mb", UNSET)) - def _parse_max_size_mb(data: object) -> Union[None, Unset, float]: + def _parse_max_size_mb(data: object) -> float | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) max_size_mb = _parse_max_size_mb(d.pop("max_size_mb", UNSET)) diff --git a/robosystems_client/models/subgraph_response.py b/robosystems_client/models/subgraph_response.py index 04ab792..ddc7174 100644 --- a/robosystems_client/models/subgraph_response.py +++ b/robosystems_client/models/subgraph_response.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import datetime from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -30,12 +32,12 @@ class SubgraphResponse: status (str): Current status of the subgraph created_at (datetime.datetime): When the subgraph was created updated_at (datetime.datetime): When the subgraph was last updated - description (Union[None, Unset, str]): Description of the subgraph's purpose - size_mb (Union[None, Unset, float]): Size of the subgraph database in megabytes - node_count (Union[None, Unset, int]): Number of nodes in the subgraph - edge_count (Union[None, Unset, int]): Number of edges in the subgraph - last_accessed (Union[None, Unset, datetime.datetime]): When the subgraph was last accessed - metadata (Union['SubgraphResponseMetadataType0', None, Unset]): Additional metadata for the subgraph + description (None | str | Unset): Description of the 
subgraph's purpose + size_mb (float | None | Unset): Size of the subgraph database in megabytes + node_count (int | None | Unset): Number of nodes in the subgraph + edge_count (int | None | Unset): Number of edges in the subgraph + last_accessed (datetime.datetime | None | Unset): When the subgraph was last accessed + metadata (None | SubgraphResponseMetadataType0 | Unset): Additional metadata for the subgraph """ graph_id: str @@ -47,12 +49,12 @@ class SubgraphResponse: status: str created_at: datetime.datetime updated_at: datetime.datetime - description: Union[None, Unset, str] = UNSET - size_mb: Union[None, Unset, float] = UNSET - node_count: Union[None, Unset, int] = UNSET - edge_count: Union[None, Unset, int] = UNSET - last_accessed: Union[None, Unset, datetime.datetime] = UNSET - metadata: Union["SubgraphResponseMetadataType0", None, Unset] = UNSET + description: None | str | Unset = UNSET + size_mb: float | None | Unset = UNSET + node_count: int | None | Unset = UNSET + edge_count: int | None | Unset = UNSET + last_accessed: datetime.datetime | None | Unset = UNSET + metadata: None | SubgraphResponseMetadataType0 | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -76,31 +78,31 @@ def to_dict(self) -> dict[str, Any]: updated_at = self.updated_at.isoformat() - description: Union[None, Unset, str] + description: None | str | Unset if isinstance(self.description, Unset): description = UNSET else: description = self.description - size_mb: Union[None, Unset, float] + size_mb: float | None | Unset if isinstance(self.size_mb, Unset): size_mb = UNSET else: size_mb = self.size_mb - node_count: Union[None, Unset, int] + node_count: int | None | Unset if isinstance(self.node_count, Unset): node_count = UNSET else: node_count = self.node_count - edge_count: Union[None, Unset, int] + edge_count: int | None | Unset if isinstance(self.edge_count, Unset): edge_count = UNSET else: edge_count = 
self.edge_count - last_accessed: Union[None, Unset, str] + last_accessed: None | str | Unset if isinstance(self.last_accessed, Unset): last_accessed = UNSET elif isinstance(self.last_accessed, datetime.datetime): @@ -108,7 +110,7 @@ def to_dict(self) -> dict[str, Any]: else: last_accessed = self.last_accessed - metadata: Union[None, Unset, dict[str, Any]] + metadata: dict[str, Any] | None | Unset if isinstance(self.metadata, Unset): metadata = UNSET elif isinstance(self.metadata, SubgraphResponseMetadataType0): @@ -169,43 +171,43 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: updated_at = isoparse(d.pop("updated_at")) - def _parse_description(data: object) -> Union[None, Unset, str]: + def _parse_description(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) description = _parse_description(d.pop("description", UNSET)) - def _parse_size_mb(data: object) -> Union[None, Unset, float]: + def _parse_size_mb(data: object) -> float | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) size_mb = _parse_size_mb(d.pop("size_mb", UNSET)) - def _parse_node_count(data: object) -> Union[None, Unset, int]: + def _parse_node_count(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) node_count = _parse_node_count(d.pop("node_count", UNSET)) - def _parse_edge_count(data: object) -> Union[None, Unset, int]: + def _parse_edge_count(data: object) -> int | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, int], data) + return cast(int | None | Unset, data) edge_count = _parse_edge_count(d.pop("edge_count", 
UNSET)) - def _parse_last_accessed(data: object) -> Union[None, Unset, datetime.datetime]: + def _parse_last_accessed(data: object) -> datetime.datetime | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -216,15 +218,13 @@ def _parse_last_accessed(data: object) -> Union[None, Unset, datetime.datetime]: last_accessed_type_0 = isoparse(data) return last_accessed_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, Unset, datetime.datetime], data) + return cast(datetime.datetime | None | Unset, data) last_accessed = _parse_last_accessed(d.pop("last_accessed", UNSET)) - def _parse_metadata( - data: object, - ) -> Union["SubgraphResponseMetadataType0", None, Unset]: + def _parse_metadata(data: object) -> None | SubgraphResponseMetadataType0 | Unset: if data is None: return data if isinstance(data, Unset): @@ -235,9 +235,9 @@ def _parse_metadata( metadata_type_0 = SubgraphResponseMetadataType0.from_dict(data) return metadata_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["SubgraphResponseMetadataType0", None, Unset], data) + return cast(None | SubgraphResponseMetadataType0 | Unset, data) metadata = _parse_metadata(d.pop("metadata", UNSET)) diff --git a/robosystems_client/models/subgraph_response_metadata_type_0.py b/robosystems_client/models/subgraph_response_metadata_type_0.py index 06cfc74..1266c8f 100644 --- a/robosystems_client/models/subgraph_response_metadata_type_0.py +++ b/robosystems_client/models/subgraph_response_metadata_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/subgraph_summary.py b/robosystems_client/models/subgraph_summary.py index 8f41395..927cfb5 100644 --- a/robosystems_client/models/subgraph_summary.py +++ b/robosystems_client/models/subgraph_summary.py @@ -1,6 +1,8 
@@ +from __future__ import annotations + import datetime from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -23,8 +25,8 @@ class SubgraphSummary: subgraph_type (SubgraphType): Types of subgraphs. status (str): Current status created_at (datetime.datetime): Creation timestamp - size_mb (Union[None, Unset, float]): Size in megabytes - last_accessed (Union[None, Unset, datetime.datetime]): Last access timestamp + size_mb (float | None | Unset): Size in megabytes + last_accessed (datetime.datetime | None | Unset): Last access timestamp """ graph_id: str @@ -33,8 +35,8 @@ class SubgraphSummary: subgraph_type: SubgraphType status: str created_at: datetime.datetime - size_mb: Union[None, Unset, float] = UNSET - last_accessed: Union[None, Unset, datetime.datetime] = UNSET + size_mb: float | None | Unset = UNSET + last_accessed: datetime.datetime | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -50,13 +52,13 @@ def to_dict(self) -> dict[str, Any]: created_at = self.created_at.isoformat() - size_mb: Union[None, Unset, float] + size_mb: float | None | Unset if isinstance(self.size_mb, Unset): size_mb = UNSET else: size_mb = self.size_mb - last_accessed: Union[None, Unset, str] + last_accessed: None | str | Unset if isinstance(self.last_accessed, Unset): last_accessed = UNSET elif isinstance(self.last_accessed, datetime.datetime): @@ -98,16 +100,16 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: created_at = isoparse(d.pop("created_at")) - def _parse_size_mb(data: object) -> Union[None, Unset, float]: + def _parse_size_mb(data: object) -> float | None | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, float], data) + return cast(float | None | Unset, data) 
size_mb = _parse_size_mb(d.pop("size_mb", UNSET)) - def _parse_last_accessed(data: object) -> Union[None, Unset, datetime.datetime]: + def _parse_last_accessed(data: object) -> datetime.datetime | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -118,9 +120,9 @@ def _parse_last_accessed(data: object) -> Union[None, Unset, datetime.datetime]: last_accessed_type_0 = isoparse(data) return last_accessed_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, Unset, datetime.datetime], data) + return cast(datetime.datetime | None | Unset, data) last_accessed = _parse_last_accessed(d.pop("last_accessed", UNSET)) diff --git a/robosystems_client/models/success_response.py b/robosystems_client/models/success_response.py index 584d1cd..1848ff0 100644 --- a/robosystems_client/models/success_response.py +++ b/robosystems_client/models/success_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -22,13 +24,13 @@ class SuccessResponse: Attributes: message (str): Human-readable success message - success (Union[Unset, bool]): Indicates the operation completed successfully Default: True. - data (Union['SuccessResponseDataType0', None, Unset]): Optional additional data related to the operation + success (bool | Unset): Indicates the operation completed successfully Default: True. 
+ data (None | SuccessResponseDataType0 | Unset): Optional additional data related to the operation """ message: str - success: Union[Unset, bool] = True - data: Union["SuccessResponseDataType0", None, Unset] = UNSET + success: bool | Unset = True + data: None | SuccessResponseDataType0 | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -38,7 +40,7 @@ def to_dict(self) -> dict[str, Any]: success = self.success - data: Union[None, Unset, dict[str, Any]] + data: dict[str, Any] | None | Unset if isinstance(self.data, Unset): data = UNSET elif isinstance(self.data, SuccessResponseDataType0): @@ -69,7 +71,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: success = d.pop("success", UNSET) - def _parse_data(data: object) -> Union["SuccessResponseDataType0", None, Unset]: + def _parse_data(data: object) -> None | SuccessResponseDataType0 | Unset: if data is None: return data if isinstance(data, Unset): @@ -80,9 +82,9 @@ def _parse_data(data: object) -> Union["SuccessResponseDataType0", None, Unset]: data_type_0 = SuccessResponseDataType0.from_dict(data) return data_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["SuccessResponseDataType0", None, Unset], data) + return cast(None | SuccessResponseDataType0 | Unset, data) data = _parse_data(d.pop("data", UNSET)) diff --git a/robosystems_client/models/success_response_data_type_0.py b/robosystems_client/models/success_response_data_type_0.py index 8733fb1..24096cc 100644 --- a/robosystems_client/models/success_response_data_type_0.py +++ b/robosystems_client/models/success_response_data_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/sync_connection_request.py b/robosystems_client/models/sync_connection_request.py index 32189dc..44032cd 
100644 --- a/robosystems_client/models/sync_connection_request.py +++ b/robosystems_client/models/sync_connection_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -20,12 +22,12 @@ class SyncConnectionRequest: """Request to sync a connection. Attributes: - full_sync (Union[Unset, bool]): Perform full sync vs incremental Default: False. - sync_options (Union['SyncConnectionRequestSyncOptionsType0', None, Unset]): Provider-specific sync options + full_sync (bool | Unset): Perform full sync vs incremental Default: False. + sync_options (None | SyncConnectionRequestSyncOptionsType0 | Unset): Provider-specific sync options """ - full_sync: Union[Unset, bool] = False - sync_options: Union["SyncConnectionRequestSyncOptionsType0", None, Unset] = UNSET + full_sync: bool | Unset = False + sync_options: None | SyncConnectionRequestSyncOptionsType0 | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -35,7 +37,7 @@ def to_dict(self) -> dict[str, Any]: full_sync = self.full_sync - sync_options: Union[None, Unset, dict[str, Any]] + sync_options: dict[str, Any] | None | Unset if isinstance(self.sync_options, Unset): sync_options = UNSET elif isinstance(self.sync_options, SyncConnectionRequestSyncOptionsType0): @@ -64,7 +66,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: def _parse_sync_options( data: object, - ) -> Union["SyncConnectionRequestSyncOptionsType0", None, Unset]: + ) -> None | SyncConnectionRequestSyncOptionsType0 | Unset: if data is None: return data if isinstance(data, Unset): @@ -75,9 +77,9 @@ def _parse_sync_options( sync_options_type_0 = SyncConnectionRequestSyncOptionsType0.from_dict(data) return sync_options_type_0 
- except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["SyncConnectionRequestSyncOptionsType0", None, Unset], data) + return cast(None | SyncConnectionRequestSyncOptionsType0 | Unset, data) sync_options = _parse_sync_options(d.pop("sync_options", UNSET)) diff --git a/robosystems_client/models/sync_connection_request_sync_options_type_0.py b/robosystems_client/models/sync_connection_request_sync_options_type_0.py index 9a027f0..9cfa0e3 100644 --- a/robosystems_client/models/sync_connection_request_sync_options_type_0.py +++ b/robosystems_client/models/sync_connection_request_sync_options_type_0.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/sync_connection_response_syncconnection.py b/robosystems_client/models/sync_connection_response_syncconnection.py index 7e8859e..6c1efa2 100644 --- a/robosystems_client/models/sync_connection_response_syncconnection.py +++ b/robosystems_client/models/sync_connection_response_syncconnection.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/table_info.py b/robosystems_client/models/table_info.py index b9dfb5e..01d17a7 100644 --- a/robosystems_client/models/table_info.py +++ b/robosystems_client/models/table_info.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -15,16 +17,16 @@ class TableInfo: Attributes: table_name (str): Table name row_count (int): Approximate row count - file_count (Union[Unset, int]): Number of files Default: 0. - total_size_bytes (Union[Unset, int]): Total size in bytes Default: 0. 
- s3_location (Union[None, Unset, str]): S3 location for external tables + file_count (int | Unset): Number of files Default: 0. + total_size_bytes (int | Unset): Total size in bytes Default: 0. + s3_location (None | str | Unset): S3 location for external tables """ table_name: str row_count: int - file_count: Union[Unset, int] = 0 - total_size_bytes: Union[Unset, int] = 0 - s3_location: Union[None, Unset, str] = UNSET + file_count: int | Unset = 0 + total_size_bytes: int | Unset = 0 + s3_location: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -36,7 +38,7 @@ def to_dict(self) -> dict[str, Any]: total_size_bytes = self.total_size_bytes - s3_location: Union[None, Unset, str] + s3_location: None | str | Unset if isinstance(self.s3_location, Unset): s3_location = UNSET else: @@ -70,12 +72,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: total_size_bytes = d.pop("total_size_bytes", UNSET) - def _parse_s3_location(data: object) -> Union[None, Unset, str]: + def _parse_s3_location(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) s3_location = _parse_s3_location(d.pop("s3_location", UNSET)) diff --git a/robosystems_client/models/table_ingest_result.py b/robosystems_client/models/table_ingest_result.py deleted file mode 100644 index b21966b..0000000 --- a/robosystems_client/models/table_ingest_result.py +++ /dev/null @@ -1,107 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -from ..types import UNSET, Unset - -T = TypeVar("T", bound="TableIngestResult") - - -@_attrs_define -class TableIngestResult: - """ - Attributes: - table_name (str): Table name - status (str): Ingestion status 
(success/failed/skipped) - rows_ingested (Union[Unset, int]): Number of rows ingested Default: 0. - execution_time_ms (Union[Unset, float]): Ingestion time in milliseconds Default: 0.0. - error (Union[None, Unset, str]): Error message if failed - """ - - table_name: str - status: str - rows_ingested: Union[Unset, int] = 0 - execution_time_ms: Union[Unset, float] = 0.0 - error: Union[None, Unset, str] = UNSET - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - table_name = self.table_name - - status = self.status - - rows_ingested = self.rows_ingested - - execution_time_ms = self.execution_time_ms - - error: Union[None, Unset, str] - if isinstance(self.error, Unset): - error = UNSET - else: - error = self.error - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "table_name": table_name, - "status": status, - } - ) - if rows_ingested is not UNSET: - field_dict["rows_ingested"] = rows_ingested - if execution_time_ms is not UNSET: - field_dict["execution_time_ms"] = execution_time_ms - if error is not UNSET: - field_dict["error"] = error - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - table_name = d.pop("table_name") - - status = d.pop("status") - - rows_ingested = d.pop("rows_ingested", UNSET) - - execution_time_ms = d.pop("execution_time_ms", UNSET) - - def _parse_error(data: object) -> Union[None, Unset, str]: - if data is None: - return data - if isinstance(data, Unset): - return data - return cast(Union[None, Unset, str], data) - - error = _parse_error(d.pop("error", UNSET)) - - table_ingest_result = cls( - table_name=table_name, - status=status, - rows_ingested=rows_ingested, - execution_time_ms=execution_time_ms, - error=error, - ) - - table_ingest_result.additional_properties = d - return table_ingest_result - - @property - def additional_keys(self) -> 
list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/robosystems_client/models/table_list_response.py b/robosystems_client/models/table_list_response.py index c2782e8..9a1c817 100644 --- a/robosystems_client/models/table_list_response.py +++ b/robosystems_client/models/table_list_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import TYPE_CHECKING, Any, TypeVar @@ -15,11 +17,11 @@ class TableListResponse: """ Attributes: - tables (list['TableInfo']): List of tables + tables (list[TableInfo]): List of tables total_count (int): Total number of tables """ - tables: list["TableInfo"] + tables: list[TableInfo] total_count: int additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) diff --git a/robosystems_client/models/table_query_request.py b/robosystems_client/models/table_query_request.py index a239c91..bc87a1c 100644 --- a/robosystems_client/models/table_query_request.py +++ b/robosystems_client/models/table_query_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define @@ -14,17 +16,17 @@ class TableQueryRequest: Attributes: sql (str): SQL query to execute on staging tables. Use ? placeholders or $param_name for dynamic values to prevent SQL injection. - parameters (Union[None, Unset, list[Any]]): Query parameters for safe value substitution. 
ALWAYS use parameters + parameters (list[Any] | None | Unset): Query parameters for safe value substitution. ALWAYS use parameters instead of string concatenation. """ sql: str - parameters: Union[None, Unset, list[Any]] = UNSET + parameters: list[Any] | None | Unset = UNSET def to_dict(self) -> dict[str, Any]: sql = self.sql - parameters: Union[None, Unset, list[Any]] + parameters: list[Any] | None | Unset if isinstance(self.parameters, Unset): parameters = UNSET elif isinstance(self.parameters, list): @@ -50,7 +52,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) sql = d.pop("sql") - def _parse_parameters(data: object) -> Union[None, Unset, list[Any]]: + def _parse_parameters(data: object) -> list[Any] | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -61,9 +63,9 @@ def _parse_parameters(data: object) -> Union[None, Unset, list[Any]]: parameters_type_0 = cast(list[Any], data) return parameters_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, Unset, list[Any]], data) + return cast(list[Any] | None | Unset, data) parameters = _parse_parameters(d.pop("parameters", UNSET)) diff --git a/robosystems_client/models/table_query_response.py b/robosystems_client/models/table_query_response.py index e957cda..6ae9f31 100644 --- a/robosystems_client/models/table_query_response.py +++ b/robosystems_client/models/table_query_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar, cast diff --git a/robosystems_client/models/token_pricing.py b/robosystems_client/models/token_pricing.py index cdeef85..0325bb1 100644 --- a/robosystems_client/models/token_pricing.py +++ b/robosystems_client/models/token_pricing.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git 
a/robosystems_client/models/transaction_summary_response.py b/robosystems_client/models/transaction_summary_response.py index 693cc0c..5452a8a 100644 --- a/robosystems_client/models/transaction_summary_response.py +++ b/robosystems_client/models/transaction_summary_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -18,16 +20,16 @@ class TransactionSummaryResponse: total_amount (float): transaction_count (int): average_amount (float): - first_transaction (Union[None, Unset, str]): - last_transaction (Union[None, Unset, str]): + first_transaction (None | str | Unset): + last_transaction (None | str | Unset): """ operation_type: str total_amount: float transaction_count: int average_amount: float - first_transaction: Union[None, Unset, str] = UNSET - last_transaction: Union[None, Unset, str] = UNSET + first_transaction: None | str | Unset = UNSET + last_transaction: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -39,13 +41,13 @@ def to_dict(self) -> dict[str, Any]: average_amount = self.average_amount - first_transaction: Union[None, Unset, str] + first_transaction: None | str | Unset if isinstance(self.first_transaction, Unset): first_transaction = UNSET else: first_transaction = self.first_transaction - last_transaction: Union[None, Unset, str] + last_transaction: None | str | Unset if isinstance(self.last_transaction, Unset): last_transaction = UNSET else: @@ -79,21 +81,21 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: average_amount = d.pop("average_amount") - def _parse_first_transaction(data: object) -> Union[None, Unset, str]: + def _parse_first_transaction(data: object) -> None | str | Unset: if data is None: return data if 
isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) first_transaction = _parse_first_transaction(d.pop("first_transaction", UNSET)) - def _parse_last_transaction(data: object) -> Union[None, Unset, str]: + def _parse_last_transaction(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) last_transaction = _parse_last_transaction(d.pop("last_transaction", UNSET)) diff --git a/robosystems_client/models/upcoming_invoice.py b/robosystems_client/models/upcoming_invoice.py index d32015c..bda7a9b 100644 --- a/robosystems_client/models/upcoming_invoice.py +++ b/robosystems_client/models/upcoming_invoice.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -22,16 +24,16 @@ class UpcomingInvoice: currency (str): Currency code period_start (str): Billing period start period_end (str): Billing period end - line_items (list['InvoiceLineItem']): Estimated line items - subscription_id (Union[None, Unset, str]): Associated subscription ID + line_items (list[InvoiceLineItem]): Estimated line items + subscription_id (None | str | Unset): Associated subscription ID """ amount_due: int currency: str period_start: str period_end: str - line_items: list["InvoiceLineItem"] - subscription_id: Union[None, Unset, str] = UNSET + line_items: list[InvoiceLineItem] + subscription_id: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -48,7 +50,7 @@ def to_dict(self) -> dict[str, Any]: line_items_item = line_items_item_data.to_dict() 
line_items.append(line_items_item) - subscription_id: Union[None, Unset, str] + subscription_id: None | str | Unset if isinstance(self.subscription_id, Unset): subscription_id = UNSET else: @@ -90,12 +92,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: line_items.append(line_items_item) - def _parse_subscription_id(data: object) -> Union[None, Unset, str]: + def _parse_subscription_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) subscription_id = _parse_subscription_id(d.pop("subscription_id", UNSET)) diff --git a/robosystems_client/models/update_api_key_request.py b/robosystems_client/models/update_api_key_request.py index df939a6..74d0205 100644 --- a/robosystems_client/models/update_api_key_request.py +++ b/robosystems_client/models/update_api_key_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,22 +16,22 @@ class UpdateAPIKeyRequest: """Request model for updating an API key. 
Attributes: - name (Union[None, Unset, str]): New name for the API key - description (Union[None, Unset, str]): New description + name (None | str | Unset): New name for the API key + description (None | str | Unset): New description """ - name: Union[None, Unset, str] = UNSET - description: Union[None, Unset, str] = UNSET + name: None | str | Unset = UNSET + description: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: - name: Union[None, Unset, str] + name: None | str | Unset if isinstance(self.name, Unset): name = UNSET else: name = self.name - description: Union[None, Unset, str] + description: None | str | Unset if isinstance(self.description, Unset): description = UNSET else: @@ -49,21 +51,21 @@ def to_dict(self) -> dict[str, Any]: def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - def _parse_name(data: object) -> Union[None, Unset, str]: + def _parse_name(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) name = _parse_name(d.pop("name", UNSET)) - def _parse_description(data: object) -> Union[None, Unset, str]: + def _parse_description(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) description = _parse_description(d.pop("description", UNSET)) diff --git a/robosystems_client/models/update_file_status_response_updatefilestatus.py b/robosystems_client/models/update_file_response_updatefile.py similarity index 76% rename from robosystems_client/models/update_file_status_response_updatefilestatus.py rename to robosystems_client/models/update_file_response_updatefile.py index 57e3960..a2be858 100644 --- 
a/robosystems_client/models/update_file_status_response_updatefilestatus.py +++ b/robosystems_client/models/update_file_response_updatefile.py @@ -1,14 +1,16 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field -T = TypeVar("T", bound="UpdateFileStatusResponseUpdatefilestatus") +T = TypeVar("T", bound="UpdateFileResponseUpdatefile") @_attrs_define -class UpdateFileStatusResponseUpdatefilestatus: +class UpdateFileResponseUpdatefile: """ """ additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) @@ -22,10 +24,10 @@ def to_dict(self) -> dict[str, Any]: @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - update_file_status_response_updatefilestatus = cls() + update_file_response_updatefile = cls() - update_file_status_response_updatefilestatus.additional_properties = d - return update_file_status_response_updatefilestatus + update_file_response_updatefile.additional_properties = d + return update_file_response_updatefile @property def additional_keys(self) -> list[str]: diff --git a/robosystems_client/models/update_member_role_request.py b/robosystems_client/models/update_member_role_request.py index bbaaf2e..d4955f9 100644 --- a/robosystems_client/models/update_member_role_request.py +++ b/robosystems_client/models/update_member_role_request.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/update_org_request.py b/robosystems_client/models/update_org_request.py index c0b1258..064a8ec 100644 --- a/robosystems_client/models/update_org_request.py +++ b/robosystems_client/models/update_org_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import 
Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -15,22 +17,22 @@ class UpdateOrgRequest: """Request to update organization details. Attributes: - name (Union[None, Unset, str]): - org_type (Union[None, OrgType, Unset]): + name (None | str | Unset): + org_type (None | OrgType | Unset): """ - name: Union[None, Unset, str] = UNSET - org_type: Union[None, OrgType, Unset] = UNSET + name: None | str | Unset = UNSET + org_type: None | OrgType | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: - name: Union[None, Unset, str] + name: None | str | Unset if isinstance(self.name, Unset): name = UNSET else: name = self.name - org_type: Union[None, Unset, str] + org_type: None | str | Unset if isinstance(self.org_type, Unset): org_type = UNSET elif isinstance(self.org_type, OrgType): @@ -52,16 +54,16 @@ def to_dict(self) -> dict[str, Any]: def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - def _parse_name(data: object) -> Union[None, Unset, str]: + def _parse_name(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) name = _parse_name(d.pop("name", UNSET)) - def _parse_org_type(data: object) -> Union[None, OrgType, Unset]: + def _parse_org_type(data: object) -> None | OrgType | Unset: if data is None: return data if isinstance(data, Unset): @@ -72,9 +74,9 @@ def _parse_org_type(data: object) -> Union[None, OrgType, Unset]: org_type_type_0 = OrgType(data) return org_type_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, OrgType, Unset], data) + return cast(None | OrgType | Unset, data) org_type = _parse_org_type(d.pop("org_type", UNSET)) diff --git a/robosystems_client/models/update_password_request.py 
b/robosystems_client/models/update_password_request.py index 493b775..c26a75b 100644 --- a/robosystems_client/models/update_password_request.py +++ b/robosystems_client/models/update_password_request.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/update_user_request.py b/robosystems_client/models/update_user_request.py index fa5e188..c04b1d3 100644 --- a/robosystems_client/models/update_user_request.py +++ b/robosystems_client/models/update_user_request.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,22 +16,22 @@ class UpdateUserRequest: """Request model for updating user profile. Attributes: - name (Union[None, Unset, str]): User's display name - email (Union[None, Unset, str]): User's email address + name (None | str | Unset): User's display name + email (None | str | Unset): User's email address """ - name: Union[None, Unset, str] = UNSET - email: Union[None, Unset, str] = UNSET + name: None | str | Unset = UNSET + email: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: - name: Union[None, Unset, str] + name: None | str | Unset if isinstance(self.name, Unset): name = UNSET else: name = self.name - email: Union[None, Unset, str] + email: None | str | Unset if isinstance(self.email, Unset): email = UNSET else: @@ -49,21 +51,21 @@ def to_dict(self) -> dict[str, Any]: def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - def _parse_name(data: object) -> Union[None, Unset, str]: + def _parse_name(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return 
cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) name = _parse_name(d.pop("name", UNSET)) - def _parse_email(data: object) -> Union[None, Unset, str]: + def _parse_email(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) email = _parse_email(d.pop("email", UNSET)) diff --git a/robosystems_client/models/upgrade_subscription_request.py b/robosystems_client/models/upgrade_subscription_request.py index 832ad7a..614b26d 100644 --- a/robosystems_client/models/upgrade_subscription_request.py +++ b/robosystems_client/models/upgrade_subscription_request.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, TypeVar diff --git a/robosystems_client/models/user_graphs_response.py b/robosystems_client/models/user_graphs_response.py index 9c22a62..ae7a288 100644 --- a/robosystems_client/models/user_graphs_response.py +++ b/robosystems_client/models/user_graphs_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -18,12 +20,12 @@ class UserGraphsResponse: """User graphs response model. 
Attributes: - graphs (list['GraphInfo']): List of accessible graphs - selected_graph_id (Union[None, Unset, str]): Currently selected graph ID + graphs (list[GraphInfo]): List of accessible graphs + selected_graph_id (None | str | Unset): Currently selected graph ID """ - graphs: list["GraphInfo"] - selected_graph_id: Union[None, Unset, str] = UNSET + graphs: list[GraphInfo] + selected_graph_id: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -32,7 +34,7 @@ def to_dict(self) -> dict[str, Any]: graphs_item = graphs_item_data.to_dict() graphs.append(graphs_item) - selected_graph_id: Union[None, Unset, str] + selected_graph_id: None | str | Unset if isinstance(self.selected_graph_id, Unset): selected_graph_id = UNSET else: @@ -62,12 +64,12 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: graphs.append(graphs_item) - def _parse_selected_graph_id(data: object) -> Union[None, Unset, str]: + def _parse_selected_graph_id(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) selected_graph_id = _parse_selected_graph_id(d.pop("selectedGraphId", UNSET)) diff --git a/robosystems_client/models/user_response.py b/robosystems_client/models/user_response.py index e93cbd4..66ae7a4 100644 --- a/robosystems_client/models/user_response.py +++ b/robosystems_client/models/user_response.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -24,33 +26,33 @@ class UserResponse: Attributes: id (str): Unique identifier for the user - name (Union[None, Unset, str]): User's display name - email (Union[None, 
Unset, str]): User's email address - accounts (Union[Unset, list['AccountInfo']]): User's authentication accounts + name (None | str | Unset): User's display name + email (None | str | Unset): User's email address + accounts (list[AccountInfo] | Unset): User's authentication accounts """ id: str - name: Union[None, Unset, str] = UNSET - email: Union[None, Unset, str] = UNSET - accounts: Union[Unset, list["AccountInfo"]] = UNSET + name: None | str | Unset = UNSET + email: None | str | Unset = UNSET + accounts: list[AccountInfo] | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: id = self.id - name: Union[None, Unset, str] + name: None | str | Unset if isinstance(self.name, Unset): name = UNSET else: name = self.name - email: Union[None, Unset, str] + email: None | str | Unset if isinstance(self.email, Unset): email = UNSET else: email = self.email - accounts: Union[Unset, list[dict[str, Any]]] = UNSET + accounts: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.accounts, Unset): accounts = [] for accounts_item_data in self.accounts: @@ -80,30 +82,32 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) id = d.pop("id") - def _parse_name(data: object) -> Union[None, Unset, str]: + def _parse_name(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) name = _parse_name(d.pop("name", UNSET)) - def _parse_email(data: object) -> Union[None, Unset, str]: + def _parse_email(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) email = _parse_email(d.pop("email", UNSET)) - accounts = [] _accounts = d.pop("accounts", UNSET) - for accounts_item_data in _accounts or []: - accounts_item = 
AccountInfo.from_dict(accounts_item_data) + accounts: list[AccountInfo] | Unset = UNSET + if _accounts is not UNSET: + accounts = [] + for accounts_item_data in _accounts: + accounts_item = AccountInfo.from_dict(accounts_item_data) - accounts.append(accounts_item) + accounts.append(accounts_item) user_response = cls( id=id, diff --git a/robosystems_client/models/validation_error.py b/robosystems_client/models/validation_error.py index 9f656b7..73565dd 100644 --- a/robosystems_client/models/validation_error.py +++ b/robosystems_client/models/validation_error.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -11,12 +13,12 @@ class ValidationError: """ Attributes: - loc (list[Union[int, str]]): + loc (list[int | str]): msg (str): type_ (str): """ - loc: list[Union[int, str]] + loc: list[int | str] msg: str type_: str additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) @@ -24,7 +26,7 @@ class ValidationError: def to_dict(self) -> dict[str, Any]: loc = [] for loc_item_data in self.loc: - loc_item: Union[int, str] + loc_item: int | str loc_item = loc_item_data loc.append(loc_item) @@ -51,8 +53,8 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: _loc = d.pop("loc") for loc_item_data in _loc: - def _parse_loc_item(data: object) -> Union[int, str]: - return cast(Union[int, str], data) + def _parse_loc_item(data: object) -> int | str: + return cast(int | str, data) loc_item = _parse_loc_item(loc_item_data) diff --git a/robosystems_client/models/view_axis_config.py b/robosystems_client/models/view_axis_config.py new file mode 100644 index 0000000..e24c43d --- /dev/null +++ b/robosystems_client/models/view_axis_config.py @@ -0,0 +1,276 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing 
import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.view_axis_config_element_labels_type_0 import ( + ViewAxisConfigElementLabelsType0, + ) + from ..models.view_axis_config_member_labels_type_0 import ( + ViewAxisConfigMemberLabelsType0, + ) + + +T = TypeVar("T", bound="ViewAxisConfig") + + +@_attrs_define +class ViewAxisConfig: + """ + Attributes: + type_ (str): Axis type: 'element', 'period', 'dimension', 'entity' + dimension_axis (None | str | Unset): Dimension axis name for dimension-type axes + include_null_dimension (bool | Unset): Include facts where this dimension is NULL (default: false) Default: + False. + selected_members (list[str] | None | Unset): Specific members to include (e.g., ['2024-12-31', '2023-12-31']) + member_order (list[str] | None | Unset): Explicit ordering of members (overrides default sort) + member_labels (None | Unset | ViewAxisConfigMemberLabelsType0): Custom labels for members (e.g., {'2024-12-31': + 'Current Year'}) + element_order (list[str] | None | Unset): Element ordering for hierarchy display (e.g., ['us-gaap:Assets', 'us- + gaap:Cash', ...]) + element_labels (None | Unset | ViewAxisConfigElementLabelsType0): Custom labels for elements (e.g., {'us- + gaap:Cash': 'Cash and Cash Equivalents'}) + """ + + type_: str + dimension_axis: None | str | Unset = UNSET + include_null_dimension: bool | Unset = False + selected_members: list[str] | None | Unset = UNSET + member_order: list[str] | None | Unset = UNSET + member_labels: None | Unset | ViewAxisConfigMemberLabelsType0 = UNSET + element_order: list[str] | None | Unset = UNSET + element_labels: None | Unset | ViewAxisConfigElementLabelsType0 = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + from ..models.view_axis_config_element_labels_type_0 import ( + 
ViewAxisConfigElementLabelsType0, + ) + from ..models.view_axis_config_member_labels_type_0 import ( + ViewAxisConfigMemberLabelsType0, + ) + + type_ = self.type_ + + dimension_axis: None | str | Unset + if isinstance(self.dimension_axis, Unset): + dimension_axis = UNSET + else: + dimension_axis = self.dimension_axis + + include_null_dimension = self.include_null_dimension + + selected_members: list[str] | None | Unset + if isinstance(self.selected_members, Unset): + selected_members = UNSET + elif isinstance(self.selected_members, list): + selected_members = self.selected_members + + else: + selected_members = self.selected_members + + member_order: list[str] | None | Unset + if isinstance(self.member_order, Unset): + member_order = UNSET + elif isinstance(self.member_order, list): + member_order = self.member_order + + else: + member_order = self.member_order + + member_labels: dict[str, Any] | None | Unset + if isinstance(self.member_labels, Unset): + member_labels = UNSET + elif isinstance(self.member_labels, ViewAxisConfigMemberLabelsType0): + member_labels = self.member_labels.to_dict() + else: + member_labels = self.member_labels + + element_order: list[str] | None | Unset + if isinstance(self.element_order, Unset): + element_order = UNSET + elif isinstance(self.element_order, list): + element_order = self.element_order + + else: + element_order = self.element_order + + element_labels: dict[str, Any] | None | Unset + if isinstance(self.element_labels, Unset): + element_labels = UNSET + elif isinstance(self.element_labels, ViewAxisConfigElementLabelsType0): + element_labels = self.element_labels.to_dict() + else: + element_labels = self.element_labels + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "type": type_, + } + ) + if dimension_axis is not UNSET: + field_dict["dimension_axis"] = dimension_axis + if include_null_dimension is not UNSET: + field_dict["include_null_dimension"] = 
include_null_dimension + if selected_members is not UNSET: + field_dict["selected_members"] = selected_members + if member_order is not UNSET: + field_dict["member_order"] = member_order + if member_labels is not UNSET: + field_dict["member_labels"] = member_labels + if element_order is not UNSET: + field_dict["element_order"] = element_order + if element_labels is not UNSET: + field_dict["element_labels"] = element_labels + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.view_axis_config_element_labels_type_0 import ( + ViewAxisConfigElementLabelsType0, + ) + from ..models.view_axis_config_member_labels_type_0 import ( + ViewAxisConfigMemberLabelsType0, + ) + + d = dict(src_dict) + type_ = d.pop("type") + + def _parse_dimension_axis(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + dimension_axis = _parse_dimension_axis(d.pop("dimension_axis", UNSET)) + + include_null_dimension = d.pop("include_null_dimension", UNSET) + + def _parse_selected_members(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + selected_members_type_0 = cast(list[str], data) + + return selected_members_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) + + selected_members = _parse_selected_members(d.pop("selected_members", UNSET)) + + def _parse_member_order(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + member_order_type_0 = cast(list[str], data) + + return member_order_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) 
+ + member_order = _parse_member_order(d.pop("member_order", UNSET)) + + def _parse_member_labels( + data: object, + ) -> None | Unset | ViewAxisConfigMemberLabelsType0: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + member_labels_type_0 = ViewAxisConfigMemberLabelsType0.from_dict(data) + + return member_labels_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(None | Unset | ViewAxisConfigMemberLabelsType0, data) + + member_labels = _parse_member_labels(d.pop("member_labels", UNSET)) + + def _parse_element_order(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + element_order_type_0 = cast(list[str], data) + + return element_order_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) + + element_order = _parse_element_order(d.pop("element_order", UNSET)) + + def _parse_element_labels( + data: object, + ) -> None | Unset | ViewAxisConfigElementLabelsType0: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + element_labels_type_0 = ViewAxisConfigElementLabelsType0.from_dict(data) + + return element_labels_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(None | Unset | ViewAxisConfigElementLabelsType0, data) + + element_labels = _parse_element_labels(d.pop("element_labels", UNSET)) + + view_axis_config = cls( + type_=type_, + dimension_axis=dimension_axis, + include_null_dimension=include_null_dimension, + selected_members=selected_members, + member_order=member_order, + member_labels=member_labels, + element_order=element_order, + element_labels=element_labels, + ) + + view_axis_config.additional_properties = d + return 
view_axis_config + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/view_axis_config_element_labels_type_0.py b/robosystems_client/models/view_axis_config_element_labels_type_0.py new file mode 100644 index 0000000..8f1638f --- /dev/null +++ b/robosystems_client/models/view_axis_config_element_labels_type_0.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="ViewAxisConfigElementLabelsType0") + + +@_attrs_define +class ViewAxisConfigElementLabelsType0: + """ """ + + additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + view_axis_config_element_labels_type_0 = cls() + + view_axis_config_element_labels_type_0.additional_properties = d + return view_axis_config_element_labels_type_0 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> str: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: str) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def 
__contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/view_axis_config_member_labels_type_0.py b/robosystems_client/models/view_axis_config_member_labels_type_0.py new file mode 100644 index 0000000..ec29942 --- /dev/null +++ b/robosystems_client/models/view_axis_config_member_labels_type_0.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="ViewAxisConfigMemberLabelsType0") + + +@_attrs_define +class ViewAxisConfigMemberLabelsType0: + """ """ + + additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + view_axis_config_member_labels_type_0 = cls() + + view_axis_config_member_labels_type_0.additional_properties = d + return view_axis_config_member_labels_type_0 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> str: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: str) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/view_config.py b/robosystems_client/models/view_config.py new file mode 100644 index 0000000..782f103 --- /dev/null +++ b/robosystems_client/models/view_config.py @@ -0,0 +1,127 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, 
TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.view_axis_config import ViewAxisConfig + + +T = TypeVar("T", bound="ViewConfig") + + +@_attrs_define +class ViewConfig: + """ + Attributes: + rows (list[ViewAxisConfig] | Unset): Row axis configuration + columns (list[ViewAxisConfig] | Unset): Column axis configuration + values (str | Unset): Field to use for values (default: numeric_value) Default: 'numeric_value'. + aggregation_function (str | Unset): Aggregation function: sum, average, count Default: 'sum'. + fill_value (float | Unset): Value to use for missing data Default: 0.0. + """ + + rows: list[ViewAxisConfig] | Unset = UNSET + columns: list[ViewAxisConfig] | Unset = UNSET + values: str | Unset = "numeric_value" + aggregation_function: str | Unset = "sum" + fill_value: float | Unset = 0.0 + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + rows: list[dict[str, Any]] | Unset = UNSET + if not isinstance(self.rows, Unset): + rows = [] + for rows_item_data in self.rows: + rows_item = rows_item_data.to_dict() + rows.append(rows_item) + + columns: list[dict[str, Any]] | Unset = UNSET + if not isinstance(self.columns, Unset): + columns = [] + for columns_item_data in self.columns: + columns_item = columns_item_data.to_dict() + columns.append(columns_item) + + values = self.values + + aggregation_function = self.aggregation_function + + fill_value = self.fill_value + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if rows is not UNSET: + field_dict["rows"] = rows + if columns is not UNSET: + field_dict["columns"] = columns + if values is not UNSET: + field_dict["values"] = values + if aggregation_function is not UNSET: + field_dict["aggregation_function"] = aggregation_function + if fill_value is not UNSET: + 
field_dict["fill_value"] = fill_value + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.view_axis_config import ViewAxisConfig + + d = dict(src_dict) + _rows = d.pop("rows", UNSET) + rows: list[ViewAxisConfig] | Unset = UNSET + if _rows is not UNSET: + rows = [] + for rows_item_data in _rows: + rows_item = ViewAxisConfig.from_dict(rows_item_data) + + rows.append(rows_item) + + _columns = d.pop("columns", UNSET) + columns: list[ViewAxisConfig] | Unset = UNSET + if _columns is not UNSET: + columns = [] + for columns_item_data in _columns: + columns_item = ViewAxisConfig.from_dict(columns_item_data) + + columns.append(columns_item) + + values = d.pop("values", UNSET) + + aggregation_function = d.pop("aggregation_function", UNSET) + + fill_value = d.pop("fill_value", UNSET) + + view_config = cls( + rows=rows, + columns=columns, + values=values, + aggregation_function=aggregation_function, + fill_value=fill_value, + ) + + view_config.additional_properties = d + return view_config + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/view_source.py b/robosystems_client/models/view_source.py new file mode 100644 index 0000000..a53557a --- /dev/null +++ b/robosystems_client/models/view_source.py @@ -0,0 +1,144 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..models.view_source_type import ViewSourceType 
+from ..types import UNSET, Unset + +T = TypeVar("T", bound="ViewSource") + + +@_attrs_define +class ViewSource: + """ + Attributes: + type_ (ViewSourceType): + period_start (None | str | Unset): Start date for transaction aggregation (YYYY-MM-DD) + period_end (None | str | Unset): End date for transaction aggregation (YYYY-MM-DD) + fact_set_id (None | str | Unset): FactSet ID for existing facts mode + entity_id (None | str | Unset): Filter by entity (optional) + """ + + type_: ViewSourceType + period_start: None | str | Unset = UNSET + period_end: None | str | Unset = UNSET + fact_set_id: None | str | Unset = UNSET + entity_id: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + type_ = self.type_.value + + period_start: None | str | Unset + if isinstance(self.period_start, Unset): + period_start = UNSET + else: + period_start = self.period_start + + period_end: None | str | Unset + if isinstance(self.period_end, Unset): + period_end = UNSET + else: + period_end = self.period_end + + fact_set_id: None | str | Unset + if isinstance(self.fact_set_id, Unset): + fact_set_id = UNSET + else: + fact_set_id = self.fact_set_id + + entity_id: None | str | Unset + if isinstance(self.entity_id, Unset): + entity_id = UNSET + else: + entity_id = self.entity_id + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "type": type_, + } + ) + if period_start is not UNSET: + field_dict["period_start"] = period_start + if period_end is not UNSET: + field_dict["period_end"] = period_end + if fact_set_id is not UNSET: + field_dict["fact_set_id"] = fact_set_id + if entity_id is not UNSET: + field_dict["entity_id"] = entity_id + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + type_ = ViewSourceType(d.pop("type")) + + def _parse_period_start(data: object) -> 
None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + period_start = _parse_period_start(d.pop("period_start", UNSET)) + + def _parse_period_end(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + period_end = _parse_period_end(d.pop("period_end", UNSET)) + + def _parse_fact_set_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + fact_set_id = _parse_fact_set_id(d.pop("fact_set_id", UNSET)) + + def _parse_entity_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + entity_id = _parse_entity_id(d.pop("entity_id", UNSET)) + + view_source = cls( + type_=type_, + period_start=period_start, + period_end=period_end, + fact_set_id=fact_set_id, + entity_id=entity_id, + ) + + view_source.additional_properties = d + return view_source + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/view_source_type.py b/robosystems_client/models/view_source_type.py new file mode 100644 index 0000000..471211e --- /dev/null +++ b/robosystems_client/models/view_source_type.py @@ -0,0 +1,9 @@ +from enum import Enum + + +class ViewSourceType(str, Enum): + FACT_SET = "fact_set" + TRANSACTIONS = "transactions" + + def __str__(self) -> str: + return 
str(self.value) diff --git a/robosystems_client/types.py b/robosystems_client/types.py index 94b4179..e49e073 100644 --- a/robosystems_client/types.py +++ b/robosystems_client/types.py @@ -2,7 +2,7 @@ from collections.abc import Mapping, MutableMapping from http import HTTPStatus -from typing import IO, BinaryIO, Generic, Literal, Optional, TypeVar, Union +from typing import IO, BinaryIO, Generic, Literal, TypeVar from attrs import define @@ -15,13 +15,13 @@ def __bool__(self) -> Literal[False]: UNSET: Unset = Unset() # The types that `httpx.Client(files=)` can accept, copied from that library. -FileContent = Union[IO[bytes], bytes, str] -FileTypes = Union[ +FileContent = IO[bytes] | bytes | str +FileTypes = ( # (filename, file (or bytes), content_type) - tuple[Optional[str], FileContent, Optional[str]], + tuple[str | None, FileContent, str | None] # (filename, file (or bytes), content_type, headers) - tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], -] + | tuple[str | None, FileContent, str | None, Mapping[str, str]] +) RequestFiles = list[tuple[str, FileTypes]] @@ -30,8 +30,8 @@ class File: """Contains information for file uploads""" payload: BinaryIO - file_name: Optional[str] = None - mime_type: Optional[str] = None + file_name: str | None = None + mime_type: str | None = None def to_tuple(self) -> FileTypes: """Return a tuple representation that httpx will accept for multipart/form-data""" @@ -48,7 +48,7 @@ class Response(Generic[T]): status_code: HTTPStatus content: bytes headers: MutableMapping[str, str] - parsed: Optional[T] + parsed: T | None __all__ = ["UNSET", "File", "FileTypes", "RequestFiles", "Response", "Unset"]