diff --git a/.gitignore b/.gitignore
index a2aa2fe..3aa3399 100644
--- a/.gitignore
+++ b/.gitignore
@@ -298,6 +298,7 @@ local*.*
!local_spark_session.py
!local_file_system_storage_configuration.py
!local_cred_utils.py
+!src/corvus_python/storage/local_file_storage.py
test-reports/
pytest-test-results.xml
behave-test-results.xml
diff --git a/README.md b/README.md
index a66d3b6..e19563f 100644
--- a/README.md
+++ b/README.md
@@ -204,6 +204,131 @@ except EmailError as e:
---
+### `storage`
+
+Provides storage configuration abstractions for data lake operations, with implementations for local and Azure Data Lake Gen2 storage.
+
+| Component Name | Object Type | Description | Import syntax |
+|---|---|---|---|
+| DataLakeLayer | Enum | Enumeration of data lake layers: `BRONZE`, `SILVER`, `GOLD`. | from corvus_python.storage import DataLakeLayer |
+| StorageConfiguration | Class (abstract) | Base class for storage configurations. Provides a `get_full_path()` method and `storage_options` dict. | from corvus_python.storage import StorageConfiguration |
+| LocalFileSystemStorageConfiguration | Class | Storage configuration backed by the local file system. Useful for local development. | from corvus_python.storage import LocalFileSystemStorageConfiguration |
+| AzureDataLakeFileSystemPerLayerConfiguration | Class | ADLS Gen2 configuration where each data lake layer maps to a separate file system (`bronze`, `silver`, `gold`). | from corvus_python.storage import AzureDataLakeFileSystemPerLayerConfiguration |
+| AzureDataLakeSingleFileSystemConfiguration | Class | ADLS Gen2 configuration using a single file system with top-level folders for each layer. | from corvus_python.storage import AzureDataLakeSingleFileSystemConfiguration |
+
+#### Usage Example
+
+```python
+from corvus_python.storage import (
+ DataLakeLayer,
+ LocalFileSystemStorageConfiguration,
+ AzureDataLakeFileSystemPerLayerConfiguration,
+ AzureDataLakeSingleFileSystemConfiguration,
+)
+
+# Local filesystem (for development)
+local_config = LocalFileSystemStorageConfiguration(base_path="./data")
+path = local_config.get_full_path(DataLakeLayer.BRONZE, "my_database/my_table")
+# -> ./data/bronze/my_database/my_table
+
+# Azure Data Lake - separate file system per layer
+adls_per_layer = AzureDataLakeFileSystemPerLayerConfiguration(
+ storage_account_name="mystorageaccount",
+ storage_options={"account_key": "..."},
+)
+path = adls_per_layer.get_full_path(DataLakeLayer.SILVER, "my_database/my_table")
+# -> abfss://silver@mystorageaccount.dfs.core.windows.net/my_database/my_table
+
+# Azure Data Lake - single file system
+adls_single = AzureDataLakeSingleFileSystemConfiguration(
+ storage_account_name="mystorageaccount",
+ file_system_name="datalake",
+)
+path = adls_single.get_full_path(DataLakeLayer.GOLD, "my_database/my_table")
+# -> abfss://datalake@mystorageaccount.dfs.core.windows.net/gold/my_database/my_table
+```
+
+---
+
+### `repositories`
+
+Provides repository classes for reading and writing structured data across various storage backends, built on [Polars](https://pola.rs/). All repositories accept a `StorageConfiguration` to abstract over local and cloud storage.
+
+#### Supporting Data Classes
+
+| Component Name | Object Type | Description | Import syntax |
+|---|---|---|---|
+| DatabaseDefinition | Dataclass | Defines a logical database by name and a list of `TableDefinition` instances. | from corvus_python.repositories import DatabaseDefinition |
+| TableDefinition | Dataclass | Defines a table by name, Pandera schema, optional `title`, and optional `db_schema` (the SQL schema name, e.g. `dbo`, used when creating SQL views over the table in Synapse or Fabric). | from corvus_python.repositories import TableDefinition |
+
+#### `PolarsDeltaTableRepository`
+
+Manages Delta Lake tables within a specified data lake layer. Handles schema validation using Pandera and integrates with OpenTelemetry for tracing.
+
+```python
+from corvus_python.repositories import PolarsDeltaTableRepository, DatabaseDefinition, TableDefinition
+from corvus_python.storage import LocalFileSystemStorageConfiguration, DataLakeLayer
+```
+
+| Method | Description |
+|---|---|
+| `read_data(table_name)` | Reads a Delta table into a `LazyFrame`. Returns `None` if the table is empty. |
+| `overwrite_table(table_name, data, overwrite_schema=False)` | Overwrites the table after eagerly validating the full dataset against its Pandera schema. |
+| `overwrite_table_lazy(table_name, data, overwrite_schema=False)` | Overwrites the table using streaming execution. Performs schema-level validation only. |
+| `overwrite_table_with_condition(table_name, data, predicate, overwrite_schema=False)` | Overwrites only rows matching the given predicate (e.g. for partition-level updates). |
+| `append_to_table(table_name, data)` | Appends data to an existing Delta table. |
+
+#### `PolarsCsvDataRepository`
+
+Reads CSV files from a hive-partitioned path (`snapshot_time=<snapshot_timestamp>/<object_name>.csv`).
+
+```python
+from corvus_python.repositories import PolarsCsvDataRepository
+```
+
+| Method | Description |
+|---|---|
+| `load_csv(object_name, snapshot_timestamp, include_file_paths=None)` | Loads a CSV file into a `DataFrame`. Strips `.csv` suffix from `object_name` if present. |
+
+#### `PolarsExcelDataRepository`
+
+Reads Excel workbooks from a hive-partitioned path, returning all sheets as a `dict[str, DataFrame]`.
+
+```python
+from corvus_python.repositories import PolarsExcelDataRepository
+```
+
+| Method | Description |
+|---|---|
+| `load_excel(snapshot_timestamp, workbook_name, relative_path=None)` | Loads all sheets from an `.xlsx` workbook into a dict keyed by sheet name. |
+
+#### `PolarsNdJsonDataRepository`
+
+Reads Newline Delimited JSON (NDJSON) files from a partitioned path (`<load_type>/snapshot_time=<snapshot_timestamp>/<object_name>.json`).
+
+```python
+from corvus_python.repositories import PolarsNdJsonDataRepository
+```
+
+| Method | Description |
+|---|---|
+| `load_ndjson(object_name, load_type, snapshot_timestamp, include_file_paths=None, schema_overrides=None, schema=None)` | Loads an NDJSON file into a `DataFrame`. Supports schema overrides and full schema specification. |
+
+#### `PolarsAzureTableRepository`
+
+Queries Azure Table Storage, returning results as Polars DataFrames. Authenticates using `DefaultAzureCredential`.
+
+```python
+from corvus_python.repositories import PolarsAzureTableRepository
+```
+
+| Method | Description |
+|---|---|
+| `query(table_name, query_filter, parameters, schema=None)` | Queries an Azure Table with an OData filter and named parameters. |
+| `get_entities_partition_key_starts_with(table_name, partition_key_prefix, schema=None)` | Retrieves all entities whose `PartitionKey` starts with the given prefix. |
+
+---
+
### `sql`
Includes utility functions for working with SQL databases via pyodbc, with AAD token-based authentication. Provides helpers for connecting to Synapse serverless SQL and Fabric SQL Analytics endpoints, managing views over Delta Lake tables, and executing DDL statements.
diff --git a/poetry.lock b/poetry.lock
index 0747dbe..d756285 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,5 +1,16 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
+]
+
[[package]]
name = "anyio"
version = "4.12.0"
@@ -85,6 +96,96 @@ cffi = [
{version = ">=2.0.0b1", markers = "python_version >= \"3.14\""},
]
+[[package]]
+name = "arro3-core"
+version = "0.8.0"
+description = ""
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "arro3_core-0.8.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ac43f746d5331bb37ed1b0ade18a12707b64cb85b3eb5cc1d5d7b5029f1f2c12"},
+ {file = "arro3_core-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29b3d1cbd2c4bac787f473d071e1eb02b71b2701a7118bb5d0a274ffbd26b16c"},
+ {file = "arro3_core-0.8.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ea28c1f0d7bd327b0116557e041151da7eca3362e1ffe8cc9f53832c808a75f8"},
+ {file = "arro3_core-0.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1244885a5b3aebebcfedac2c30a83a635f15d65bc9079e32c16cae07ec3b4db2"},
+ {file = "arro3_core-0.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6147bc60d36bce9241e5972dc344fb144eabe8cef4e2c0812eb58cdfadebeacb"},
+ {file = "arro3_core-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:524e2ce13ea3d2739df4d52ea03977a53d103c1fd73f0fcc6a713903ea6ad4fa"},
+ {file = "arro3_core-0.8.0-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:12a7eddb4b406a4d9343bd9d42d2bb40de0bdc4ad5f50bfe10b0836e98ac2285"},
+ {file = "arro3_core-0.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c8f36b7a064c0080d2db903d52ffe0682602f26c1d7ccc9347f846b4bfe2cad8"},
+ {file = "arro3_core-0.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e8e1230c94f55d5a9ffcd99580a0d38e9299cdf743e9d5ad7595be862b5dc21c"},
+ {file = "arro3_core-0.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d30a515f1f52afb54b7fe1306179a8a18c9e8c0ef6631eeec82ebab21cc07a8a"},
+ {file = "arro3_core-0.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a60a568ab9d96eb0f52670da8a0b356369d32460d30857fd60cc5c7b74e1d02"},
+ {file = "arro3_core-0.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2aefe40ca6e374fad1c94200f586bd22917717f275d48968846b8e5c698d5e"},
+ {file = "arro3_core-0.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:a928179451fe32564b39989ad737d769c2d0343ee71e8b3a4ebd3dd8c9d2c8f7"},
+ {file = "arro3_core-0.8.0-cp311-abi3-macosx_10_12_x86_64.whl", hash = "sha256:051b1c46b424c207b7ee2f5ae50f8f88cb79d167c3e4000adf59a0e3e3994331"},
+ {file = "arro3_core-0.8.0-cp311-abi3-macosx_11_0_arm64.whl", hash = "sha256:c6b0e0b8914e634096fb377046bfcd21420b50141394e8cc1b12d43a98df1a43"},
+ {file = "arro3_core-0.8.0-cp311-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4c20b6a55016ecd3f37f7dadf4d13d5a03dd51b7385e8f4130931447d110700"},
+ {file = "arro3_core-0.8.0-cp311-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90dbbde6294d7349b2713e308cd3ef284de75003e8b5ad927f1716e7062525ce"},
+ {file = "arro3_core-0.8.0-cp311-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee6693d496ab733fce43b2e83f9f7b5147db6906b3fbeba3b2d4108ffae5fbec"},
+ {file = "arro3_core-0.8.0-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d882481e2f739fe449ca9bf724f4b33185fc48ba87dd82a26a64e6a23f5ed2f8"},
+ {file = "arro3_core-0.8.0-cp311-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:d56d08a3e08864512d343a4d75e468beba743abc3a9d139e14bf3e81d0d8d79b"},
+ {file = "arro3_core-0.8.0-cp311-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02c47e2d87f063e04c12c074f4cc66efd65fc9c6b14db7f80934827ec46c589d"},
+ {file = "arro3_core-0.8.0-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:72fa13702df4698884900e60824fecda434f61ffecb5ff0d914bf9f0afa79fe9"},
+ {file = "arro3_core-0.8.0-cp311-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:8ab0bc6ad9b449b8a939e13ce94f6cacfea1d21953d437a8aa2ff8b4622512e0"},
+ {file = "arro3_core-0.8.0-cp311-abi3-musllinux_1_2_i686.whl", hash = "sha256:975a3e3dea90789608d40c54b4176b9b72c9664a4cd2c842914ac62c489b1f06"},
+ {file = "arro3_core-0.8.0-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7c3658fda04e0816333c8dda702c777d305b581876cd4176b15877726231b350"},
+ {file = "arro3_core-0.8.0-cp311-abi3-win_amd64.whl", hash = "sha256:a988c6cb74f97df4d276d5496f8667b6d5d95311d453ef32b28fb933b5ae96c4"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:3cfa6b5c3981711a602c357afae1f16a6daa380cac8365100365560852e51d4a"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4438167e4c357bafe66e8716adf5a55d73d79cf31bd4f7db465491605ee4afbc"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5ddc9a49b04ff179e1f6281164ee88008e73a0a72a931449c24ad0f8897be220"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85dfb4df87cd7e9adc17798e4468d5ea4f3e5dbd7845abebe1c85bba2a092ba3"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d4abad932811cadc1ae3e4976c4bb797e025c2451ae551edc60cf34a807edcf"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c8a80c8ece04cb45328eba5667dacdef553dbe379443920f98b25d8ce3db761"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-manylinux_2_24_aarch64.whl", hash = "sha256:12fc8c7133102c77661051a5e55c331a84dc58a3a8fe58fd18c38fcb61fa80d8"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:278f2d35b4144ef7c77a037fd68dccacd651eda462cf2e739a85043109749cd3"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b7173b44e8809eb772a8bdb51dd866edb32682aac0c80055ea8e3c79077ad8c5"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:bc69ca8cbd02a2a0d63d8278182380ba79d62c798ada8768fd700e8e5168b4c1"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bc02ce82e8681d87c1d9fa27c0bc8322c982d93ba12a317dce33756cee79f285"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e66450987724a1f71bdfa1f721486af09bd07cb86227f575805e6f94f764b4f"},
+ {file = "arro3_core-0.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:03fc7a1348a9d42f48061d45825e823985ee10c80aa509bafc0e84b10e7ecbb4"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:be7dd0088bbab7b528d8d754b0fa05506e26da62f4a5d2f741fe94d7548e724e"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:396496e96e4b86ac73aef32263c607c2161b878f334cf6ef954aaa74c8f1267f"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:828032a416906af1d84702301885098ab0bc2aa9f956f677b676161aeabeb06d"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d56b263bbc747691d08b3902a5f0d77adfb180d0544f9c52d622b2b79cd21f"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f08c07be0ff8d893d756ba20381b4fcbdf50af3c2bcec677529664920c07cf5"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34b280c70fe6bd6ca4c236f552d09b51ac551dc1c24793c9142ce89087346371"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-manylinux_2_24_aarch64.whl", hash = "sha256:37202b826dd9695fc775064806bc07897c04caacef9403ea9d6706635f95ebdd"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b70530b95d36e1409023f7bde3e9aeb75e3048852beb44263d98685c9f0d8f37"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:faf03d46e0a1817bf3959c21f2ca4d2bd2d61277b5319439df3044082e10effa"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:7a120ee05477c7e28565ce0b7572413a093745bb314195c4206c0ef578abea1b"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a19842cfa196f07c7fd7398d08eec5bdeed331b522dcbbf9d53830180f8d6d66"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d6ceab802cc609498e47dc214967d282af8c3104c7a83aff008739192cf821e8"},
+ {file = "arro3_core-0.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:355e22a8845cbc6379e705f71a08c9cdaab6a7facc63a863e43ee5dc56ed7976"},
+ {file = "arro3_core-0.8.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8e44f93f3a4295b33fcde8c8e7dd65cfab5f3a6996f6f8f76bcaba6097a72eb0"},
+ {file = "arro3_core-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b90edee9e259245cdd18bf38ebc7bec70c55f609722ba0f2faaa73bac47f21d"},
+ {file = "arro3_core-0.8.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:136a51f721fbb98cf27f40d99634a91b38bf34faf424ed0f1f9f3f0fba5073ea"},
+ {file = "arro3_core-0.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:223fc71ff6a7146babefe341a4b77aa122e2f594742f68b7c81fdef3938d24f3"},
+ {file = "arro3_core-0.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8626e3e4d2009836f0e9ef6b61713f383fb1a13494cbe48719ccc1e8a99048de"},
+ {file = "arro3_core-0.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6914887962eee77e2776290910afe8041b1d78f472a4e14024b79064a6dbcd1"},
+ {file = "arro3_core-0.8.0-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:484a8457240cc8ef48b0c457ab4d65c8543e1f2a9710356c7d93500919dc2666"},
+ {file = "arro3_core-0.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:043afdc505fd3b27b69a3fc241fe0fd7f14c3850e3106e1e86627bb05cbc1e66"},
+ {file = "arro3_core-0.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b5c1c44c11893a8c9e565aacd3ff3c5f508993f83f0b17ec9a6b7d5050aaf10"},
+ {file = "arro3_core-0.8.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:083135d8a67d28d8caa4928c2919030ea9f4dbd973886445c64f2f975bea550a"},
+ {file = "arro3_core-0.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bd53bea121a482efa8fb3a5abae0e434e10568618cc7f1ceb5c2c96b19078a29"},
+ {file = "arro3_core-0.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:53eebfd0eb4becde8c446a70cd744c8a9cc3eab7f3fffb17a54171dac201b35d"},
+ {file = "arro3_core-0.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:26d9eeef6a464e40283674152f11e9b43e9fd6f1508462647cb2de07a1e0a7fb"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a4b89836e3e761d6e74437e3c40b26b0b83b9be1ca4c9c15d049cd6c4791cbc9"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:df944e458b10262e548ca7d5b1c079238955d11ae294ae4258e73dabe494e2c8"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:282ec1edea03818186978ee01568e8e6d2f92bd4ef9e94c7923873e0a442aa99"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb254cf4fd8b10681479df88f303ec03d1f54e4689479c77bbf81df841a4bb4"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7699397b5892294af5a0ff3165b1aa4339bcebdbb1a51fc38bd9ffc9e283f3d0"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64ac061557bd150a37bb5bc4fbd46c162db5254acd6338f800e907ddc93f5422"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.whl", hash = "sha256:a6a4212ac0555e195d7617488c030b85aa9acd0d4e0ad8da3bf18c3572f2d60a"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b5f016f6af7531afb3d22f20a56adcf68073348c37c9cc196e96740f7e95a70"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e981a204d9b829eab1fbe1a04f0fa53f06dc4a6c9695e978d9ca0eed32925d2f"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:593c2ca6f47de78fd92abf1809b625be9c20e36b1d3160a5d79713ec7d04819f"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:57ff2c2761b4451c1a693f7a63d26ed1067e9d64e3670411e45998989859f3e5"},
+ {file = "arro3_core-0.8.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d285aab000ef4ad4d91597e9662298ad3ac774939e8accea96a6522815331896"},
+ {file = "arro3_core-0.8.0.tar.gz", hash = "sha256:b75d8281b87a87d3b66836bab89951ae06421970e5f880717723a93e38743f40"},
+]
+
+[package.dependencies]
+typing-extensions = {version = "*", markers = "python_full_version < \"3.12\""}
+
[[package]]
name = "arrow"
version = "1.4.0"
@@ -179,6 +280,23 @@ typing-extensions = ">=4.6.0"
aio = ["aiohttp (>=3.0)"]
tracing = ["opentelemetry-api (>=1.26,<2.0)"]
+[[package]]
+name = "azure-data-tables"
+version = "12.7.0"
+description = "Microsoft Azure Azure Data Tables Client Library for Python"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "azure_data_tables-12.7.0-py3-none-any.whl", hash = "sha256:24ed9b5690aa46c213182e32bb1b39a68dd9f526d84f447c287e3a401b437c10"},
+ {file = "azure_data_tables-12.7.0.tar.gz", hash = "sha256:b14fc94a3223a2835ff5688e17d8e107b27c7cd7c4114138f2ac81373723705d"},
+]
+
+[package.dependencies]
+azure-core = ">=1.29.4"
+isodate = ">=0.6.1"
+typing-extensions = ">=4.3.0"
+yarl = ">=1.0"
+
[[package]]
name = "azure-identity"
version = "1.25.1"
@@ -197,6 +315,26 @@ msal = ">=1.30.0"
msal-extensions = ">=1.2.0"
typing-extensions = ">=4.0.0"
+[[package]]
+name = "azure-storage-blob"
+version = "12.28.0"
+description = "Microsoft Azure Blob Storage Client Library for Python"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "azure_storage_blob-12.28.0-py3-none-any.whl", hash = "sha256:00fb1db28bf6a7b7ecaa48e3b1d5c83bfadacc5a678b77826081304bd87d6461"},
+ {file = "azure_storage_blob-12.28.0.tar.gz", hash = "sha256:e7d98ea108258d29aa0efbfd591b2e2075fa1722a2fae8699f0b3c9de11eff41"},
+]
+
+[package.dependencies]
+azure-core = ">=1.30.0"
+cryptography = ">=2.1.4"
+isodate = ">=0.6.1"
+typing-extensions = ">=4.6.0"
+
+[package.extras]
+aio = ["azure-core[aio] (>=1.30.0)"]
+
[[package]]
name = "babel"
version = "2.17.0"
@@ -873,6 +1011,30 @@ files = [
importlib_metadata = ">=1.0.0"
pyspark = ">=3.5.3,<3.6.0"
+[[package]]
+name = "deltalake"
+version = "1.5.0"
+description = "Native Delta Lake Python binding based on delta-rs with Pandas integration"
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "deltalake-1.5.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b13c693989f50b3ec6e6a7ebeb3ca4ef7cb3f340b8fe8e1a0e0767319c5f0bf5"},
+ {file = "deltalake-1.5.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:db388bd519c327953e6ccd688f0cf132c9186362b54d0323d0d5ffeb00cfcde1"},
+ {file = "deltalake-1.5.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2fe5d6fe4eb20781ae593659f77a382079503c06f3525691c8fee2815de2322"},
+ {file = "deltalake-1.5.0-cp310-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7baa94c7f8234c0840627e8f2f5e3f88a02ff011a2991b8e034c187ffafcb3a0"},
+ {file = "deltalake-1.5.0-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cfc7b124dc22e885c0af413c9a3f1c4a5fd52ec78bce6fd957a78a90c7943e1b"},
+ {file = "deltalake-1.5.0-cp310-abi3-win_amd64.whl", hash = "sha256:2ad8f11a64c0477be57d310aa9b470a7c3c3ba2a4e4e86ad92c7ca3554c539f2"},
+ {file = "deltalake-1.5.0.tar.gz", hash = "sha256:cdea832ebcadd9f6ccedfcf023f244f2830152fd82b2f78b42e701989dd73b2d"},
+]
+
+[package.dependencies]
+arro3-core = ">=0.5.0"
+deprecated = ">=1.2.18"
+
+[package.extras]
+pandas = ["pandas"]
+pyarrow = ["pyarrow (>=16)"]
+
[[package]]
name = "deprecated"
version = "1.3.1"
@@ -974,6 +1136,31 @@ files = [
[package.extras]
tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
+[[package]]
+name = "fastexcel"
+version = "0.19.0"
+description = "A fast excel file reader for Python, written in Rust"
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "fastexcel-0.19.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:68601072a0b4b4277c165b68f1055f88ef7ffe7ed6f08c1eeda0f0271e3f7da0"},
+ {file = "fastexcel-0.19.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:c8a87d94445678e7e3f46a6aa39d2afaee5b88a983ec3661143a6488d8955f44"},
+ {file = "fastexcel-0.19.0-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e94fc1be6642555f277af792c22a9f80ec9b4d640d9690f00abb822b6d865069"},
+ {file = "fastexcel-0.19.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:334f9f40cd68b5924a712b6c104949757a0b8ad8a7e3fa3f3fad1c1ebc00258b"},
+ {file = "fastexcel-0.19.0-cp310-abi3-win_amd64.whl", hash = "sha256:fbbdf9de79c3ef3572809bb187927c0dc5840968ffe513ea015a383024b7c6b0"},
+ {file = "fastexcel-0.19.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:26eb85d98087b3c13e083a1fb51a3dfcd57607865fb44d8d6db451948ef65c63"},
+ {file = "fastexcel-0.19.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:42d48b077b7ec070de6ea34c99f9a0c97e45cd767fbadd135fc30fa70de24b42"},
+ {file = "fastexcel-0.19.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3c49fac330cc306bb0bd73d96138f438441d8254eed19ca6c1800aaa9d69054"},
+ {file = "fastexcel-0.19.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aad96c34836eca90fc6d0e061240c145795f8754424698e2aadfd634abb4cf"},
+ {file = "fastexcel-0.19.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7ef8e41cb0118f90d5f9a636fcdc0e9d635938cdaa54a3182328f3d34ce9ee1a"},
+ {file = "fastexcel-0.19.0.tar.gz", hash = "sha256:216c3719ee90963bd93a0bf8c10b177233046ac975b67651152fdaedd3c99aa1"},
+]
+
+[package.extras]
+pandas = ["pandas (>=1.4.4)", "pyarrow (>=8.0.0)"]
+polars = ["polars (>=0.16.14)"]
+pyarrow = ["pyarrow (>=8.0.0)"]
+
[[package]]
name = "fastjsonschema"
version = "2.21.2"
@@ -1032,6 +1219,45 @@ files = [
{file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"},
]
+[[package]]
+name = "fsspec"
+version = "2026.2.0"
+description = "File-system specification"
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437"},
+ {file = "fsspec-2026.2.0.tar.gz", hash = "sha256:6544e34b16869f5aacd5b90bdf1a71acb37792ea3ddf6125ee69a22a53fb8bff"},
+]
+
+[package.extras]
+abfs = ["adlfs"]
+adl = ["adlfs"]
+arrow = ["pyarrow (>=1)"]
+dask = ["dask", "distributed"]
+dev = ["pre-commit", "ruff (>=0.5)"]
+doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"]
+dropbox = ["dropbox", "dropboxdrivefs", "requests"]
+full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs (>2024.2.0)", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs (>2024.2.0)", "smbprotocol", "tqdm"]
+fuse = ["fusepy"]
+gcs = ["gcsfs (>2024.2.0)"]
+git = ["pygit2"]
+github = ["requests"]
+gs = ["gcsfs"]
+gui = ["panel"]
+hdfs = ["pyarrow (>=1)"]
+http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"]
+libarchive = ["libarchive-c"]
+oci = ["ocifs"]
+s3 = ["s3fs (>2024.2.0)"]
+sftp = ["paramiko"]
+smb = ["smbprotocol"]
+ssh = ["paramiko"]
+test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"]
+test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"]
+test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "backports-zstd", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas (<3.0.0)", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"]
+tqdm = ["tqdm"]
+
[[package]]
name = "h11"
version = "0.16.0"
@@ -2092,6 +2318,164 @@ msal = ">=1.29,<2"
[package.extras]
portalocker = ["portalocker (>=1.4,<4)"]
+[[package]]
+name = "multidict"
+version = "6.7.1"
+description = "multidict implementation"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "multidict-6.7.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c93c3db7ea657dd4637d57e74ab73de31bccefe144d3d4ce370052035bc85fb5"},
+ {file = "multidict-6.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:974e72a2474600827abaeda71af0c53d9ebbc3c2eb7da37b37d7829ae31232d8"},
+ {file = "multidict-6.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdea2e7b2456cfb6694fb113066fd0ec7ea4d67e3a35e1f4cbeea0b448bf5872"},
+ {file = "multidict-6.7.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17207077e29342fdc2c9a82e4b306f1127bf1ea91f8b71e02d4798a70bb99991"},
+ {file = "multidict-6.7.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4f49cb5661344764e4c7c7973e92a47a59b8fc19b6523649ec9dc4960e58a03"},
+ {file = "multidict-6.7.1-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a9fc4caa29e2e6ae408d1c450ac8bf19892c5fca83ee634ecd88a53332c59981"},
+ {file = "multidict-6.7.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c5f0c21549ab432b57dcc82130f388d84ad8179824cc3f223d5e7cfbfd4143f6"},
+ {file = "multidict-6.7.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7dfb78d966b2c906ae1d28ccf6e6712a3cd04407ee5088cd276fe8cb42186190"},
+ {file = "multidict-6.7.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9b0d9b91d1aa44db9c1f1ecd0d9d2ae610b2f4f856448664e01a3b35899f3f92"},
+ {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dd96c01a9dcd4889dcfcf9eb5544ca0c77603f239e3ffab0524ec17aea9a93ee"},
+ {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:067343c68cd6612d375710f895337b3a98a033c94f14b9a99eff902f205424e2"},
+ {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5884a04f4ff56c6120f6ccf703bdeb8b5079d808ba604d4d53aec0d55dc33568"},
+ {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8affcf1c98b82bc901702eb73b6947a1bfa170823c153fe8a47b5f5f02e48e40"},
+ {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0d17522c37d03e85c8098ec8431636309b2682cf12e58f4dbc76121fb50e4962"},
+ {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24c0cf81544ca5e17cfcb6e482e7a82cd475925242b308b890c9452a074d4505"},
+ {file = "multidict-6.7.1-cp310-cp310-win32.whl", hash = "sha256:d82dd730a95e6643802f4454b8fdecdf08667881a9c5670db85bc5a56693f122"},
+ {file = "multidict-6.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:cf37cbe5ced48d417ba045aca1b21bafca67489452debcde94778a576666a1df"},
+ {file = "multidict-6.7.1-cp310-cp310-win_arm64.whl", hash = "sha256:59bc83d3f66b41dac1e7460aac1d196edc70c9ba3094965c467715a70ecb46db"},
+ {file = "multidict-6.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ff981b266af91d7b4b3793ca3382e53229088d193a85dfad6f5f4c27fc73e5d"},
+ {file = "multidict-6.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:844c5bca0b5444adb44a623fb0a1310c2f4cd41f402126bb269cd44c9b3f3e1e"},
+ {file = "multidict-6.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2a0a924d4c2e9afcd7ec64f9de35fcd96915149b2216e1cb2c10a56df483855"},
+ {file = "multidict-6.7.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8be1802715a8e892c784c0197c2ace276ea52702a0ede98b6310c8f255a5afb3"},
+ {file = "multidict-6.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e2d2ed645ea29f31c4c7ea1552fcfd7cb7ba656e1eafd4134a6620c9f5fdd9e"},
+ {file = "multidict-6.7.1-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:95922cee9a778659e91db6497596435777bd25ed116701a4c034f8e46544955a"},
+ {file = "multidict-6.7.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6b83cabdc375ffaaa15edd97eb7c0c672ad788e2687004990074d7d6c9b140c8"},
+ {file = "multidict-6.7.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:38fb49540705369bab8484db0689d86c0a33a0a9f2c1b197f506b71b4b6c19b0"},
+ {file = "multidict-6.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:439cbebd499f92e9aa6793016a8acaa161dfa749ae86d20960189f5398a19144"},
+ {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6d3bc717b6fe763b8be3f2bee2701d3c8eb1b2a8ae9f60910f1b2860c82b6c49"},
+ {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:619e5a1ac57986dbfec9f0b301d865dddf763696435e2962f6d9cf2fdff2bb71"},
+ {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0b38ebffd9be37c1170d33bc0f36f4f262e0a09bc1aac1c34c7aa51a7293f0b3"},
+ {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:10ae39c9cfe6adedcdb764f5e8411d4a92b055e35573a2eaa88d3323289ef93c"},
+ {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:25167cc263257660290fba06b9318d2026e3c910be240a146e1f66dd114af2b0"},
+ {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:128441d052254f42989ef98b7b6a6ecb1e6f708aa962c7984235316db59f50fa"},
+ {file = "multidict-6.7.1-cp311-cp311-win32.whl", hash = "sha256:d62b7f64ffde3b99d06b707a280db04fb3855b55f5a06df387236051d0668f4a"},
+ {file = "multidict-6.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:bdbf9f3b332abd0cdb306e7c2113818ab1e922dc84b8f8fd06ec89ed2a19ab8b"},
+ {file = "multidict-6.7.1-cp311-cp311-win_arm64.whl", hash = "sha256:b8c990b037d2fff2f4e33d3f21b9b531c5745b33a49a7d6dbe7a177266af44f6"},
+ {file = "multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172"},
+ {file = "multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd"},
+ {file = "multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7"},
+ {file = "multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53"},
+ {file = "multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75"},
+ {file = "multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b"},
+ {file = "multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733"},
+ {file = "multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a"},
+ {file = "multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961"},
+ {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582"},
+ {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e"},
+ {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3"},
+ {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6"},
+ {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a"},
+ {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba"},
+ {file = "multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511"},
+ {file = "multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19"},
+ {file = "multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf"},
+ {file = "multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23"},
+ {file = "multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2"},
+ {file = "multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445"},
+ {file = "multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177"},
+ {file = "multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23"},
+ {file = "multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060"},
+ {file = "multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d"},
+ {file = "multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed"},
+ {file = "multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429"},
+ {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6"},
+ {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9"},
+ {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c"},
+ {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84"},
+ {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d"},
+ {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33"},
+ {file = "multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3"},
+ {file = "multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5"},
+ {file = "multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df"},
+ {file = "multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1"},
+ {file = "multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963"},
+ {file = "multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34"},
+ {file = "multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65"},
+ {file = "multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292"},
+ {file = "multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43"},
+ {file = "multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca"},
+ {file = "multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd"},
+ {file = "multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7"},
+ {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3"},
+ {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4"},
+ {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8"},
+ {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c"},
+ {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52"},
+ {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108"},
+ {file = "multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32"},
+ {file = "multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8"},
+ {file = "multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118"},
+ {file = "multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee"},
+ {file = "multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2"},
+ {file = "multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1"},
+ {file = "multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d"},
+ {file = "multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31"},
+ {file = "multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048"},
+ {file = "multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362"},
+ {file = "multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37"},
+ {file = "multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709"},
+ {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0"},
+ {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb"},
+ {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd"},
+ {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601"},
+ {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1"},
+ {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b"},
+ {file = "multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d"},
+ {file = "multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f"},
+ {file = "multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5"},
+ {file = "multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581"},
+ {file = "multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a"},
+ {file = "multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c"},
+ {file = "multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262"},
+ {file = "multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59"},
+ {file = "multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889"},
+ {file = "multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4"},
+ {file = "multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d"},
+ {file = "multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609"},
+ {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489"},
+ {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c"},
+ {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e"},
+ {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c"},
+ {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9"},
+ {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2"},
+ {file = "multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7"},
+ {file = "multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5"},
+ {file = "multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2"},
+ {file = "multidict-6.7.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:65573858d27cdeaca41893185677dc82395159aa28875a8867af66532d413a8f"},
+ {file = "multidict-6.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c524c6fb8fc342793708ab111c4dbc90ff9abd568de220432500e47e990c0358"},
+ {file = "multidict-6.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aa23b001d968faef416ff70dc0f1ab045517b9b42a90edd3e9bcdb06479e31d5"},
+ {file = "multidict-6.7.1-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6704fa2b7453b2fb121740555fa1ee20cd98c4d011120caf4d2b8d4e7c76eec0"},
+ {file = "multidict-6.7.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:121a34e5bfa410cdf2c8c49716de160de3b1dbcd86b49656f5681e4543bcd1a8"},
+ {file = "multidict-6.7.1-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:026d264228bcd637d4e060844e39cdc60f86c479e463d49075dedc21b18fbbe0"},
+ {file = "multidict-6.7.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0e697826df7eb63418ee190fd06ce9f1803593bb4b9517d08c60d9b9a7f69d8f"},
+ {file = "multidict-6.7.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bb08271280173720e9fea9ede98e5231defcbad90f1624bea26f32ec8a956e2f"},
+ {file = "multidict-6.7.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6b3228e1d80af737b72925ce5fb4daf5a335e49cd7ab77ed7b9fdfbf58c526e"},
+ {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3943debf0fbb57bdde5901695c11094a9a36723e5c03875f87718ee15ca2f4d2"},
+ {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:98c5787b0a0d9a41d9311eae44c3b76e6753def8d8870ab501320efe75a6a5f8"},
+ {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:08ccb2a6dc72009093ebe7f3f073e5ec5964cba9a706fa94b1a1484039b87941"},
+ {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb351f72c26dc9abe338ca7294661aa22969ad8ffe7ef7d5541d19f368dc854a"},
+ {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ac1c665bad8b5d762f5f85ebe4d94130c26965f11de70c708c75671297c776de"},
+ {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fa6609d0364f4f6f58351b4659a1f3e0e898ba2a8c5cac04cb2c7bc556b0bc5"},
+ {file = "multidict-6.7.1-cp39-cp39-win32.whl", hash = "sha256:6f77ce314a29263e67adadc7e7c1bc699fcb3a305059ab973d038f87caa42ed0"},
+ {file = "multidict-6.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:f537b55778cd3cbee430abe3131255d3a78202e0f9ea7ffc6ada893a4bcaeea4"},
+ {file = "multidict-6.7.1-cp39-cp39-win_arm64.whl", hash = "sha256:749aa54f578f2e5f439538706a475aa844bfa8ef75854b1401e6e528e4937cf9"},
+ {file = "multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56"},
+ {file = "multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""}
+
[[package]]
name = "mypy"
version = "1.19.0"
@@ -2578,6 +2962,43 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d
test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
xml = ["lxml (>=4.9.2)"]
+[[package]]
+name = "pandera"
+version = "0.29.0"
+description = "A light-weight and flexible data validation and testing tool for statistical data objects."
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "pandera-0.29.0-py3-none-any.whl", hash = "sha256:b3b25d6c00d7c100fbab96aff0e81e52d3dae543a880d24135cca705fa97c516"},
+ {file = "pandera-0.29.0.tar.gz", hash = "sha256:06bc4fc1e4ff02534dd44482a9bc704fb2e58fe3fbb11be906aa714f7f5ec801"},
+]
+
+[package.dependencies]
+packaging = ">=20.0"
+polars = {version = ">=0.20.0", optional = true, markers = "extra == \"polars\""}
+pydantic = "*"
+typeguard = "*"
+typing_extensions = "*"
+typing_inspect = ">=0.6.0"
+
+[package.extras]
+all = ["black", "dask[dataframe]", "distributed", "fastapi", "frictionless (<=4.40.8)", "geopandas (<1.1.0)", "hypothesis (>=6.92.7)", "ibis-framework (>=9.0.0)", "modin", "pandas-stubs", "polars (>=0.20.0)", "pyspark[connect] (>=3.2.0)", "pyyaml (>=5.1)", "ray", "scipy", "scipy-stubs", "shapely"]
+dask = ["dask[dataframe]", "distributed"]
+fastapi = ["fastapi"]
+frictionless = ["frictionless (<=4.40.8)"]
+geopandas = ["geopandas (<1.1.0)", "shapely"]
+hypotheses = ["scipy"]
+ibis = ["ibis-framework (>=9.0.0)"]
+io = ["pyyaml (>=5.1)"]
+modin = ["dask[dataframe]", "distributed", "modin", "ray"]
+modin-dask = ["dask[dataframe]", "distributed", "modin"]
+modin-ray = ["modin", "ray"]
+mypy = ["pandas-stubs", "scipy-stubs"]
+pandas = ["numpy (>=1.24.4)", "pandas (>=2.1.1)"]
+polars = ["polars (>=0.20.0)"]
+pyspark = ["pyspark[connect] (>=3.2.0)"]
+strategies = ["hypothesis (>=6.92.7)"]
+
[[package]]
name = "pandocfilters"
version = "1.5.1"
@@ -2691,6 +3112,67 @@ files = [
dev = ["pre-commit", "tox"]
testing = ["coverage", "pytest", "pytest-benchmark"]
+[[package]]
+name = "polars"
+version = "1.38.1"
+description = "Blazingly fast DataFrame library"
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "polars-1.38.1-py3-none-any.whl", hash = "sha256:a29479c48fed4984d88b656486d221f638cba45d3e961631a50ee5fdde38cb2c"},
+ {file = "polars-1.38.1.tar.gz", hash = "sha256:803a2be5344ef880ad625addfb8f641995cfd777413b08a10de0897345778239"},
+]
+
+[package.dependencies]
+polars-runtime-32 = "1.38.1"
+
+[package.extras]
+adbc = ["adbc-driver-manager[dbapi]", "adbc-driver-sqlite[dbapi]"]
+all = ["polars[async,cloudpickle,database,deltalake,excel,fsspec,graph,iceberg,numpy,pandas,plot,pyarrow,pydantic,style,timezone]"]
+async = ["gevent"]
+calamine = ["fastexcel (>=0.9)"]
+cloudpickle = ["cloudpickle"]
+connectorx = ["connectorx (>=0.3.2)"]
+database = ["polars[adbc,connectorx,sqlalchemy]"]
+deltalake = ["deltalake (>=1.0.0)"]
+excel = ["polars[calamine,openpyxl,xlsx2csv,xlsxwriter]"]
+fsspec = ["fsspec"]
+gpu = ["cudf-polars-cu12"]
+graph = ["matplotlib"]
+iceberg = ["pyiceberg (>=0.7.1)"]
+numpy = ["numpy (>=1.16.0)"]
+openpyxl = ["openpyxl (>=3.0.0)"]
+pandas = ["pandas", "polars[pyarrow]"]
+plot = ["altair (>=5.4.0)"]
+polars-cloud = ["polars_cloud (>=0.4.0)"]
+pyarrow = ["pyarrow (>=7.0.0)"]
+pydantic = ["pydantic"]
+rt64 = ["polars-runtime-64 (==1.38.1)"]
+rtcompat = ["polars-runtime-compat (==1.38.1)"]
+sqlalchemy = ["polars[pandas]", "sqlalchemy"]
+style = ["great-tables (>=0.8.0)"]
+timezone = ["tzdata"]
+xlsx2csv = ["xlsx2csv (>=0.8.0)"]
+xlsxwriter = ["xlsxwriter"]
+
+[[package]]
+name = "polars-runtime-32"
+version = "1.38.1"
+description = "Blazingly fast DataFrame library"
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "polars_runtime_32-1.38.1-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:18154e96044724a0ac38ce155cf63aa03c02dd70500efbbf1a61b08cadd269ef"},
+ {file = "polars_runtime_32-1.38.1-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:c49acac34cc4049ed188f1eb67d6ff3971a39b4af7f7b734b367119970f313ac"},
+ {file = "polars_runtime_32-1.38.1-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fef2ef2626a954e010e006cc8e4de467ecf32d08008f130cea1c78911f545323"},
+ {file = "polars_runtime_32-1.38.1-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a5f7a8125e2d50e2e060296551c929aec09be23a9edcb2b12ca923f555a5ba"},
+ {file = "polars_runtime_32-1.38.1-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:10d19cd9863e129273b18b7fcaab625b5c8143c2d22b3e549067b78efa32e4fa"},
+ {file = "polars_runtime_32-1.38.1-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61e8d73c614b46a00d2f853625a7569a2e4a0999333e876354ac81d1bf1bb5e2"},
+ {file = "polars_runtime_32-1.38.1-cp310-abi3-win_amd64.whl", hash = "sha256:08c2b3b93509c1141ac97891294ff5c5b0c548a373f583eaaea873a4bf506437"},
+ {file = "polars_runtime_32-1.38.1-cp310-abi3-win_arm64.whl", hash = "sha256:6d07d0cc832bfe4fb54b6e04218c2c27afcfa6b9498f9f6bbf262a00d58cc7c4"},
+ {file = "polars_runtime_32-1.38.1.tar.gz", hash = "sha256:04f20ed1f5c58771f34296a27029dc755a9e4b1390caeaef8f317e06fdfce2ec"},
+]
+
[[package]]
name = "prometheus-client"
version = "0.23.1"
@@ -2719,6 +3201,137 @@ files = [
[package.dependencies]
wcwidth = "*"
+[[package]]
+name = "propcache"
+version = "0.4.1"
+description = "Accelerated property cache"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db"},
+ {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8"},
+ {file = "propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925"},
+ {file = "propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21"},
+ {file = "propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5"},
+ {file = "propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db"},
+ {file = "propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7"},
+ {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4"},
+ {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60"},
+ {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f"},
+ {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900"},
+ {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c"},
+ {file = "propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb"},
+ {file = "propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37"},
+ {file = "propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581"},
+ {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf"},
+ {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5"},
+ {file = "propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e"},
+ {file = "propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566"},
+ {file = "propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165"},
+ {file = "propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc"},
+ {file = "propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48"},
+ {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570"},
+ {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85"},
+ {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e"},
+ {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757"},
+ {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f"},
+ {file = "propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1"},
+ {file = "propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6"},
+ {file = "propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239"},
+ {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2"},
+ {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403"},
+ {file = "propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207"},
+ {file = "propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72"},
+ {file = "propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367"},
+ {file = "propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4"},
+ {file = "propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf"},
+ {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3"},
+ {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778"},
+ {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6"},
+ {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9"},
+ {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75"},
+ {file = "propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8"},
+ {file = "propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db"},
+ {file = "propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1"},
+ {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf"},
+ {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311"},
+ {file = "propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74"},
+ {file = "propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe"},
+ {file = "propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af"},
+ {file = "propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c"},
+ {file = "propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f"},
+ {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1"},
+ {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24"},
+ {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa"},
+ {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61"},
+ {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66"},
+ {file = "propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81"},
+ {file = "propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e"},
+ {file = "propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1"},
+ {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b"},
+ {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566"},
+ {file = "propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835"},
+ {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e"},
+ {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859"},
+ {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b"},
+ {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0"},
+ {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af"},
+ {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393"},
+ {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874"},
+ {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7"},
+ {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1"},
+ {file = "propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717"},
+ {file = "propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37"},
+ {file = "propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a"},
+ {file = "propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12"},
+ {file = "propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c"},
+ {file = "propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded"},
+ {file = "propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641"},
+ {file = "propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4"},
+ {file = "propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44"},
+ {file = "propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d"},
+ {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b"},
+ {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e"},
+ {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f"},
+ {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49"},
+ {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144"},
+ {file = "propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f"},
+ {file = "propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153"},
+ {file = "propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992"},
+ {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f"},
+ {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393"},
+ {file = "propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0"},
+ {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a"},
+ {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be"},
+ {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc"},
+ {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a"},
+ {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89"},
+ {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726"},
+ {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367"},
+ {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36"},
+ {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455"},
+ {file = "propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85"},
+ {file = "propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1"},
+ {file = "propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9"},
+ {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff"},
+ {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb"},
+ {file = "propcache-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac"},
+ {file = "propcache-0.4.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888"},
+ {file = "propcache-0.4.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc"},
+ {file = "propcache-0.4.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a"},
+ {file = "propcache-0.4.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88"},
+ {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00"},
+ {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0"},
+ {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e"},
+ {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781"},
+ {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183"},
+ {file = "propcache-0.4.1-cp39-cp39-win32.whl", hash = "sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19"},
+ {file = "propcache-0.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f"},
+ {file = "propcache-0.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938"},
+ {file = "propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237"},
+ {file = "propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d"},
+]
+
[[package]]
name = "psutil"
version = "7.1.3"
@@ -2809,6 +3422,160 @@ files = [
{file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"},
]
+[[package]]
+name = "pydantic"
+version = "2.12.5"
+description = "Data validation using Python type hints"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"},
+ {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"},
+]
+
+[package.dependencies]
+annotated-types = ">=0.6.0"
+pydantic-core = "2.41.5"
+typing-extensions = ">=4.14.1"
+typing-inspection = ">=0.4.2"
+
+[package.extras]
+email = ["email-validator (>=2.0.0)"]
+timezone = ["tzdata"]
+
+[[package]]
+name = "pydantic-core"
+version = "2.41.5"
+description = "Core functionality for Pydantic validation and serialization"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"},
+ {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"},
+ {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"},
+ {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"},
+ {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"},
+ {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"},
+ {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"},
+ {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"},
+ {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"},
+ {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.14.1"
+
[[package]]
name = "pyflakes"
version = "3.1.0"
@@ -3704,6 +4471,20 @@ files = [
docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"]
+[[package]]
+name = "typeguard"
+version = "4.5.1"
+description = "Run-time type checker for Python"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "typeguard-4.5.1-py3-none-any.whl", hash = "sha256:44d2bf329d49a244110a090b55f5f91aa82d9a9834ebfd30bcc73651e4a8cc40"},
+ {file = "typeguard-4.5.1.tar.gz", hash = "sha256:f6f8ecbbc819c9bc749983cc67c02391e16a9b43b8b27f15dc70ed7c4a007274"},
+]
+
+[package.dependencies]
+typing_extensions = ">=4.14.0"
+
[[package]]
name = "typing-extensions"
version = "4.15.0"
@@ -3715,6 +4496,35 @@ files = [
{file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"},
]
+[[package]]
+name = "typing-inspect"
+version = "0.9.0"
+description = "Runtime inspection utilities for typing module."
+optional = false
+python-versions = "*"
+files = [
+ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"},
+ {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=0.3.0"
+typing-extensions = ">=3.7.4"
+
+[[package]]
+name = "typing-inspection"
+version = "0.4.2"
+description = "Runtime typing introspection tools"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"},
+ {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.12.0"
+
[[package]]
name = "tzdata"
version = "2025.2"
@@ -3891,6 +4701,148 @@ files = [
{file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"},
]
+[[package]]
+name = "yarl"
+version = "1.23.0"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "yarl-1.23.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cff6d44cb13d39db2663a22b22305d10855efa0fa8015ddeacc40bc59b9d8107"},
+ {file = "yarl-1.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c53f8347cd4200f0d70a48ad059cabaf24f5adc6ba08622a23423bc7efa10d"},
+ {file = "yarl-1.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a6940a074fb3c48356ed0158a3ca5699c955ee4185b4d7d619be3c327143e05"},
+ {file = "yarl-1.23.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed5f69ce7be7902e5c70ea19eb72d20abf7d725ab5d49777d696e32d4fc1811d"},
+ {file = "yarl-1.23.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:389871e65468400d6283c0308e791a640b5ab5c83bcee02a2f51295f95e09748"},
+ {file = "yarl-1.23.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dda608c88cf709b1d406bdfcd84d8d63cff7c9e577a403c6108ce8ce9dcc8764"},
+ {file = "yarl-1.23.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8c4fe09e0780c6c3bf2b7d4af02ee2394439d11a523bbcf095cf4747c2932007"},
+ {file = "yarl-1.23.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:31c9921eb8bd12633b41ad27686bbb0b1a2a9b8452bfdf221e34f311e9942ed4"},
+ {file = "yarl-1.23.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5f10fd85e4b75967468af655228fbfd212bdf66db1c0d135065ce288982eda26"},
+ {file = "yarl-1.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dbf507e9ef5688bada447a24d68b4b58dd389ba93b7afc065a2ba892bea54769"},
+ {file = "yarl-1.23.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:85e9beda1f591bc73e77ea1c51965c68e98dafd0fec72cdd745f77d727466716"},
+ {file = "yarl-1.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0e1fdaa14ef51366d7757b45bde294e95f6c8c049194e793eedb8387c86d5993"},
+ {file = "yarl-1.23.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:75e3026ab649bf48f9a10c0134512638725b521340293f202a69b567518d94e0"},
+ {file = "yarl-1.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:80e6d33a3d42a7549b409f199857b4fb54e2103fc44fb87605b6663b7a7ff750"},
+ {file = "yarl-1.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ec2f42d41ccbd5df0270d7df31618a8ee267bfa50997f5d720ddba86c4a83a6"},
+ {file = "yarl-1.23.0-cp310-cp310-win32.whl", hash = "sha256:debe9c4f41c32990771be5c22b56f810659f9ddf3d63f67abfdcaa2c6c9c5c1d"},
+ {file = "yarl-1.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f043cb8a2d71c981c09c510da013bc79fd661f5c60139f00dd3c3cc4f2ffb"},
+ {file = "yarl-1.23.0-cp310-cp310-win_arm64.whl", hash = "sha256:263cd4f47159c09b8b685890af949195b51d1aa82ba451c5847ca9bc6413c220"},
+ {file = "yarl-1.23.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b35d13d549077713e4414f927cdc388d62e543987c572baee613bf82f11a4b99"},
+ {file = "yarl-1.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbb0fef01f0c6b38cb0f39b1f78fc90b807e0e3c86a7ff3ce74ad77ce5c7880c"},
+ {file = "yarl-1.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc52310451fc7c629e13c4e061cbe2dd01684d91f2f8ee2821b083c58bd72432"},
+ {file = "yarl-1.23.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2c6b50c7b0464165472b56b42d4c76a7b864597007d9c085e8b63e185cf4a7a"},
+ {file = "yarl-1.23.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aafe5dcfda86c8af00386d7781d4c2181b5011b7be3f2add5e99899ea925df05"},
+ {file = "yarl-1.23.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ee33b875f0b390564c1fb7bc528abf18c8ee6073b201c6ae8524aca778e2d83"},
+ {file = "yarl-1.23.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c41e021bc6d7affb3364dc1e1e5fa9582b470f283748784bd6ea0558f87f42c"},
+ {file = "yarl-1.23.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99c8a9ed30f4164bc4c14b37a90208836cbf50d4ce2a57c71d0f52c7fb4f7598"},
+ {file = "yarl-1.23.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2af5c81a1f124609d5f33507082fc3f739959d4719b56877ab1ee7e7b3d602b"},
+ {file = "yarl-1.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6b41389c19b07c760c7e427a3462e8ab83c4bb087d127f0e854c706ce1b9215c"},
+ {file = "yarl-1.23.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1dc702e42d0684f42d6519c8d581e49c96cefaaab16691f03566d30658ee8788"},
+ {file = "yarl-1.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0e40111274f340d32ebcc0a5668d54d2b552a6cca84c9475859d364b380e3222"},
+ {file = "yarl-1.23.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:4764a6a7588561a9aef92f65bda2c4fb58fe7c675c0883862e6df97559de0bfb"},
+ {file = "yarl-1.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:03214408cfa590df47728b84c679ae4ef00be2428e11630277be0727eba2d7cc"},
+ {file = "yarl-1.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:170e26584b060879e29fac213e4228ef063f39128723807a312e5c7fec28eff2"},
+ {file = "yarl-1.23.0-cp311-cp311-win32.whl", hash = "sha256:51430653db848d258336cfa0244427b17d12db63d42603a55f0d4546f50f25b5"},
+ {file = "yarl-1.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf49a3ae946a87083ef3a34c8f677ae4243f5b824bfc4c69672e72b3d6719d46"},
+ {file = "yarl-1.23.0-cp311-cp311-win_arm64.whl", hash = "sha256:b39cb32a6582750b6cc77bfb3c49c0f8760dc18dc96ec9fb55fbb0f04e08b928"},
+ {file = "yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860"},
+ {file = "yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069"},
+ {file = "yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25"},
+ {file = "yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8"},
+ {file = "yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072"},
+ {file = "yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8"},
+ {file = "yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7"},
+ {file = "yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51"},
+ {file = "yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67"},
+ {file = "yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7"},
+ {file = "yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d"},
+ {file = "yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760"},
+ {file = "yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2"},
+ {file = "yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86"},
+ {file = "yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34"},
+ {file = "yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d"},
+ {file = "yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e"},
+ {file = "yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9"},
+ {file = "yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e"},
+ {file = "yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5"},
+ {file = "yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b"},
+ {file = "yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035"},
+ {file = "yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5"},
+ {file = "yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735"},
+ {file = "yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401"},
+ {file = "yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4"},
+ {file = "yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f"},
+ {file = "yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a"},
+ {file = "yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2"},
+ {file = "yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f"},
+ {file = "yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b"},
+ {file = "yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a"},
+ {file = "yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543"},
+ {file = "yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957"},
+ {file = "yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3"},
+ {file = "yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3"},
+ {file = "yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa"},
+ {file = "yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120"},
+ {file = "yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59"},
+ {file = "yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512"},
+ {file = "yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4"},
+ {file = "yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1"},
+ {file = "yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea"},
+ {file = "yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9"},
+ {file = "yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123"},
+ {file = "yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24"},
+ {file = "yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de"},
+ {file = "yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b"},
+ {file = "yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6"},
+ {file = "yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6"},
+ {file = "yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5"},
+ {file = "yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595"},
+ {file = "yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090"},
+ {file = "yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144"},
+ {file = "yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912"},
+ {file = "yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474"},
+ {file = "yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719"},
+ {file = "yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319"},
+ {file = "yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434"},
+ {file = "yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723"},
+ {file = "yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039"},
+ {file = "yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52"},
+ {file = "yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c"},
+ {file = "yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae"},
+ {file = "yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e"},
+ {file = "yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85"},
+ {file = "yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd"},
+ {file = "yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6"},
+ {file = "yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe"},
+ {file = "yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169"},
+ {file = "yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70"},
+ {file = "yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e"},
+ {file = "yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679"},
+ {file = "yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412"},
+ {file = "yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4"},
+ {file = "yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c"},
+ {file = "yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4"},
+ {file = "yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94"},
+ {file = "yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28"},
+ {file = "yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6"},
+ {file = "yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277"},
+ {file = "yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4"},
+ {file = "yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a"},
+ {file = "yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb"},
+ {file = "yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41"},
+ {file = "yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2"},
+ {file = "yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4"},
+ {file = "yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4"},
+ {file = "yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2"},
+ {file = "yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25"},
+ {file = "yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f"},
+ {file = "yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+propcache = ">=0.2.1"
+
[[package]]
name = "zipp"
version = "3.23.0"
@@ -3916,4 +4868,4 @@ pyspark = ["delta-spark", "pyspark"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
-content-hash = "0a49dc03688a5efdeba9146d0ba35179079ebb6e90c3b031f14d122a2fe031dd"
+content-hash = "2aebc6abdd82977c2e5e633ce948727139a1733e69b61e137a460495c6f63381"
diff --git a/pyproject.toml b/pyproject.toml
index 425ca28..af43b1d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,6 +15,13 @@ opentelemetry-api = "^1.29.0"
pyspark = { version = "^3.3.1", optional = true }
delta-spark = { version = ">=2.2.0,<4", optional = true }
azure-communication-email = "^1.0.0"
+polars = "^1.38.1"
+pandera = { extras = ["polars"], version = "^0.29.0" }
+deltalake = "^1.4.2"
+azure-storage-blob = "^12.28.0"
+fastexcel = "^0.19.0"
+fsspec = "^2026.2.0"
+azure-data-tables = "^12.7.0"
[tool.poetry.extras]
pyspark = ["pyspark", "delta-spark"]
diff --git a/src/corvus_python/monitoring/__init__.py b/src/corvus_python/monitoring/__init__.py
index aeeb572..b970331 100644
--- a/src/corvus_python/monitoring/__init__.py
+++ b/src/corvus_python/monitoring/__init__.py
@@ -1,5 +1,17 @@
-from .tracing import all_methods_start_new_current_span_with_method_name
+from .tracing import (
+ all_methods_start_new_current_span_with_method_name,
+ add_attributes_to_span,
+ start_as_current_span_with_method_name,
+ add_attributes_to_current_span,
+ add_kwargs_to_span,
+ add_kwargs_to_current_span,
+)
__all__ = [
"all_methods_start_new_current_span_with_method_name",
+ "add_attributes_to_span",
+ "start_as_current_span_with_method_name",
+ "add_attributes_to_current_span",
+ "add_kwargs_to_span",
+ "add_kwargs_to_current_span",
]
diff --git a/src/corvus_python/monitoring/tracing.py b/src/corvus_python/monitoring/tracing.py
index 7ee846c..61bc506 100644
--- a/src/corvus_python/monitoring/tracing.py
+++ b/src/corvus_python/monitoring/tracing.py
@@ -1,16 +1,18 @@
+from typing import Any
+
from opentelemetry import trace
from functools import wraps
def start_as_current_span_with_method_name(tracer: trace.Tracer):
"""
- Function decorator which starts a new span with the full name of the method (i.e. class_name.method_name for
- methods within classes, or just method_name for standalone functions) as the span name. The span is then set as
- the current span for the duration of the method call and can be accessed using trace.get_current_span().
+ Function decorator which starts a new span with the full name of the method (i.e. class_name.method_name for
+ methods within classes, or just method_name for standalone functions) as the span name. The span is then set as
+ the current span for the duration of the method call and can be accessed using trace.get_current_span().
- Args:
- tracer (trace.Tracer): The tracer to use for starting the span. Create a tracer for the source file using
- trace.get_tracer(__name__) and pass it to this decorator.
+ Args:
+ tracer (trace.Tracer): The tracer to use for starting the span. Create a tracer for the source file using
+ trace.get_tracer(__name__) and pass it to this decorator.
"""
def decorator(func):
@@ -26,11 +28,11 @@ def wrapper(*args, **kwargs):
def all_methods_start_new_current_span_with_method_name(tracer: trace.Tracer):
"""
- Class decorator which applies start_as_current_span_with_method_name to all methods within the class.
+ Class decorator which applies start_as_current_span_with_method_name to all methods within the class.
- Args:
- tracer (trace.Tracer): The tracer to use for starting the span. Create a tracer for the source file using
- trace.get_tracer(__name__) and pass it to this decorator.
+ Args:
+ tracer (trace.Tracer): The tracer to use for starting the span. Create a tracer for the source file using
+ trace.get_tracer(__name__) and pass it to this decorator.
"""
decorator = start_as_current_span_with_method_name(tracer)
@@ -46,57 +48,57 @@ def decorate(cls):
return decorate
-def add_attributes_to_span(span: trace.Span, **kwargs: dict[str, any]):
+def add_attributes_to_span(span: trace.Span, **kwargs: Any):
"""
- Adds the specified key-value pairs to the specified span as attributes.
+ Adds the specified key-value pairs to the specified span as attributes.
- For example, calling:
- add_attributes_to_span(span, key1="value1", key2="value2")
- is equivalent to calling:
- span.set_attributes({"key1": "value1", "key2": "value2"})
+ For example, calling:
+ add_attributes_to_span(span, key1="value1", key2="value2")
+ is equivalent to calling:
+ span.set_attributes({"key1": "value1", "key2": "value2"})
- Args:
- **kwargs: The key-value pairs to add to the span as attributes.
+ Args:
+ **kwargs: The key-value pairs to add to the span as attributes.
"""
if span is not None:
kwargs_as_strings = {k: str(v) for k, v in kwargs.items()}
span.set_attributes(kwargs_as_strings)
-def add_attributes_to_current_span(**kwargs: dict[str, any]):
+def add_attributes_to_current_span(**kwargs: Any):
"""
- Adds the specified key-value pairs to the current span as attributes.
+ Adds the specified key-value pairs to the current span as attributes.
- Args:
- **kwargs: The key-value pairs to add to the span as attributes.
+ Args:
+ **kwargs: The key-value pairs to add to the span as attributes.
"""
add_attributes_to_span(trace.get_current_span(), **kwargs)
-def add_kwargs_to_span(span: trace.Span, keys: list[str], source_kwargs: dict[str, any]):
+def add_kwargs_to_span(span: trace.Span, keys: list[str], source_kwargs: dict[str, Any]):
"""
- Adds the specified keys from the source_kwargs dictionary to the span as attributes.
-
- Args:
- span (trace.Span): The span to add the attributes to.
- keys (list[str]): The keys from the source_kwargs to add to the span. These are manually specified to avoid
- adding sensitive information to the span.
- source_kwargs (dict[str, any]): The dictionary to get the values from. This is typically the kwargs
- dictionary of the method being traced.
+ Adds the specified keys from the source_kwargs dictionary to the span as attributes.
+
+ Args:
+ span (trace.Span): The span to add the attributes to.
+ keys (list[str]): The keys from the source_kwargs to add to the span. These are manually specified to avoid
+ adding sensitive information to the span.
+ source_kwargs (dict[str, Any]): The dictionary to get the values from. This is typically the kwargs
+ dictionary of the method being traced.
"""
kwargs_to_add = {key: source_kwargs[key] for key in keys if key in source_kwargs}
add_attributes_to_span(span, **kwargs_to_add)
-def add_kwargs_to_current_span(keys: list[str], source_kwargs: dict[str, any]):
+def add_kwargs_to_current_span(keys: list[str], source_kwargs: dict[str, Any]):
"""
- Adds the specified keys from the source_kwargs dictionary to the current span as attributes.
+ Adds the specified keys from the source_kwargs dictionary to the current span as attributes.
- Args:
- keys (list[str]): The keys from the source_kwargs to add to the span. These are manually specified to avoid
- adding sensitive information to the span.
- source_kwargs (dict[str, any]): The dictionary to get the values from. This is typically the kwargs
- dictionary of the method being traced.
+ Args:
+ keys (list[str]): The keys from the source_kwargs to add to the span. These are manually specified to avoid
+ adding sensitive information to the span.
+ source_kwargs (dict[str, Any]): The dictionary to get the values from. This is typically the kwargs
+ dictionary of the method being traced.
"""
span = trace.get_current_span()
add_kwargs_to_span(span, keys, source_kwargs)
diff --git a/src/corvus_python/repositories/__init__.py b/src/corvus_python/repositories/__init__.py
new file mode 100644
index 0000000..f1012ef
--- /dev/null
+++ b/src/corvus_python/repositories/__init__.py
@@ -0,0 +1,6 @@
+from .database_definition import DatabaseDefinition, TableDefinition # noqa: F401
+from .polars_delta_table_repository import PolarsDeltaTableRepository # noqa: F401
+from .polars_csv_data_repository import PolarsCsvDataRepository # noqa: F401
+from .polars_azure_table_repository import PolarsAzureTableRepository # noqa: F401
+from .polars_excel_data_repository import PolarsExcelDataRepository # noqa: F401
+from .polars_ndjson_data_repository import PolarsNdJsonDataRepository # noqa: F401
diff --git a/src/corvus_python/repositories/database_definition.py b/src/corvus_python/repositories/database_definition.py
new file mode 100644
index 0000000..74552aa
--- /dev/null
+++ b/src/corvus_python/repositories/database_definition.py
@@ -0,0 +1,35 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from pandera.polars import DataFrameSchema
+
+
+@dataclass
+class TableDefinition:
+ """Defines a single table within a database, including its Pandera schema and optional metadata.
+
+ Attributes:
+ name: The name of the table.
+ schema: The Pandera DataFrameSchema used to validate data written to the table.
+ title: An optional human-readable display name for the table.
+ db_schema: An optional SQL schema name (e.g. "dbo") used when creating SQL views
+ over this table in Synapse or Fabric. Has no effect on Delta table storage paths.
+ """
+
+ name: str
+ schema: DataFrameSchema
+ title: Optional[str] = None
+ db_schema: Optional[str] = None
+
+
+@dataclass
+class DatabaseDefinition:
+ """Defines a logical database consisting of a collection of tables.
+
+ Attributes:
+ name: The name of the database. Used as a path segment when resolving table storage paths.
+ tables: The list of tables that belong to this database.
+ """
+
+ name: str
+ tables: list[TableDefinition]
diff --git a/src/corvus_python/repositories/polars_azure_table_repository.py b/src/corvus_python/repositories/polars_azure_table_repository.py
new file mode 100644
index 0000000..6d88405
--- /dev/null
+++ b/src/corvus_python/repositories/polars_azure_table_repository.py
@@ -0,0 +1,81 @@
+import logging
+from typing import Dict
+import polars as pl
+from azure.data.tables import TableServiceClient, EntityProperty
+from azure.identity import DefaultAzureCredential
+
+
+class PolarsAzureTableRepository:
+ """
+ Repository for interacting with Azure Table Storage using Polars DataFrames.
+ """
+
+ def __init__(self, storage_account_name: str):
+ """
+ Initializes the repository with the given storage account name.
+
+ Args:
+ storage_account_name (str): The name of the Azure storage account.
+ """
+ self.logger: logging.Logger = logging.getLogger(__name__)
+ self.table_service_client = TableServiceClient(
+ endpoint=f"https://{storage_account_name}.table.core.windows.net",
+ credential=DefaultAzureCredential(),
+ )
+
+ def query(
+        self, table_name: str, query_filter: str, parameters: Dict[str, str], schema: dict[str, pl.DataType] | None = None
+ ) -> pl.DataFrame:
+ """
+ Queries data from the specified Azure Table and loads it into a Polars DataFrame.
+
+ Args:
+ table_name (str): The name of the Azure Table to load data from.
+ query_filter (str): The query to filter the data.
+ parameters (Dict[str, str]): Parameters for the query filter.
+ schema (dict[str, pl.DataType]): Optional schema for the resulting DataFrame.
+ Returns:
+ pl.DataFrame: The data loaded from the Azure Table as a Polars DataFrame.
+ """
+ self.logger.info("query_table - Table name: %s - Query: %s", table_name, query_filter)
+
+ table_client = self.table_service_client.get_table_client(table_name)
+ entities = list(table_client.query_entities(query_filter, parameters=parameters))
+
+ if not entities:
+ self.logger.warning("query_table - No data found in table: %s", table_name)
+ return pl.DataFrame(schema=schema)
+
+ # Some types have their values wrapped in an EntityProperty (GUID, INT64, BINARY)
+ for entity in entities:
+ for key, value in list(entity.items()):
+ if isinstance(value, EntityProperty):
+ entity[key] = value.value
+
+ df = pl.DataFrame(entities, schema=schema)
+ self.logger.info("query_table - Loaded %d records from table: %s", df.height, table_name)
+ return df
+
+ def get_entities_partition_key_starts_with(
+        self, table_name: str, partition_key_prefix: str, schema: dict[str, pl.DataType] | None = None
+ ) -> pl.DataFrame:
+ """
+ Retrieves entities from the specified Azure Table where the PartitionKey starts with the given prefix.
+
+ Args:
+ table_name (str): The name of the Azure Table to query.
+ partition_key_prefix (str): The prefix to filter PartitionKeys.
+ schema (dict[str, pl.DataType]): Optional schema for the resulting DataFrame.
+
+ Returns:
+ pl.DataFrame: The data loaded from the Azure Table as a Polars DataFrame.
+ """
+ query_filter = "PartitionKey ge @prefix and PartitionKey lt @next_prefix"
+ next_prefix = partition_key_prefix[:-1] + chr(ord(partition_key_prefix[-1]) + 1)
+
+ parameters = {
+ "prefix": partition_key_prefix,
+ "next_prefix": next_prefix,
+ }
+
+ return self.query(table_name, query_filter, parameters, schema=schema)
diff --git a/src/corvus_python/repositories/polars_csv_data_repository.py b/src/corvus_python/repositories/polars_csv_data_repository.py
new file mode 100644
index 0000000..26aeaae
--- /dev/null
+++ b/src/corvus_python/repositories/polars_csv_data_repository.py
@@ -0,0 +1,53 @@
+import polars as pl
+import logging
+from opentelemetry import trace
+
+from ..storage import StorageConfiguration, DataLakeLayer
+from ..monitoring import all_methods_start_new_current_span_with_method_name
+
+tracer = trace.get_tracer(__name__)
+
+
+@all_methods_start_new_current_span_with_method_name(tracer)
+class PolarsCsvDataRepository:
+ def __init__(
+ self,
+ file_system_configuration: StorageConfiguration,
+ data_lake_layer: DataLakeLayer,
+ base_path: str,
+ ):
+
+ self.file_system_configuration = file_system_configuration
+ self.base_path = base_path
+ self.data_lake_layer = data_lake_layer
+ self.logger = logging.getLogger(__name__)
+
+ def load_csv(
+ self, object_name: str, snapshot_timestamp: str, include_file_paths: str | None = None
+ ) -> pl.DataFrame:
+
+ self.logger.info(
+ "load_csv - Object name: %s, Snapshot timestamp: %s",
+ object_name,
+ snapshot_timestamp,
+ )
+
+ # If object name has .csv suffix, strip it off
+ if object_name.endswith(".csv"):
+ object_name = object_name[:-4]
+
+ path = self._get_csv_file_path(object_name, snapshot_timestamp)
+
+ self.logger.info("load_csv - Target file path: %s", path)
+
+ return pl.scan_csv(
+ path, storage_options=self.file_system_configuration.storage_options, include_file_paths=include_file_paths
+ ).collect()
+
+ def _get_csv_file_path(self, object_name: str, snapshot_timestamp: str):
+ path = self.file_system_configuration.get_full_path(
+ self.data_lake_layer,
+ f"{self.base_path}/snapshot_time={snapshot_timestamp}/{object_name}.csv",
+ )
+
+ return path
diff --git a/src/corvus_python/repositories/polars_delta_table_repository.py b/src/corvus_python/repositories/polars_delta_table_repository.py
new file mode 100644
index 0000000..c073f53
--- /dev/null
+++ b/src/corvus_python/repositories/polars_delta_table_repository.py
@@ -0,0 +1,245 @@
+import polars as pl
+from deltalake import DeltaTable, write_deltalake
+from opentelemetry import trace
+
+from ..storage import (
+ DataLakeLayer,
+ StorageConfiguration,
+)
+from ..monitoring import all_methods_start_new_current_span_with_method_name
+from .database_definition import DatabaseDefinition, TableDefinition
+from ..schema import pandera_polars_to_deltalake_schema
+
+tracer = trace.get_tracer(__name__)
+
+
+@all_methods_start_new_current_span_with_method_name(tracer)
+class PolarsDeltaTableRepository:
+ """
+ A repository for managing Delta Lake tables.
+
+ This class provides methods for reading, writing, and managing Delta tables
+ within a specified data lake layer. It handles schema validation using Pandera
+ and integrates with OpenTelemetry for tracing.
+ """
+
+ def __init__(
+ self,
+ storage_configuration: StorageConfiguration,
+ data_lake_layer: DataLakeLayer,
+ base_path: str,
+ database_definition: DatabaseDefinition,
+ ):
+ """
+ Initializes the PolarsDeltaTableRepository.
+
+ Args:
+ storage_configuration: Configuration for accessing storage.
+ data_lake_layer: The data lake layer (e.g., Bronze, Silver, Gold).
+ base_path: The base path within the data lake layer.
+ database_definition: The definition of the database and its tables.
+ """
+ self.storage_configuration = storage_configuration
+ self.data_lake_layer = data_lake_layer
+ self.base_path = base_path
+ self.database_definition = database_definition
+ self.initialised = False
+ self.storage_options = self.storage_configuration.storage_options
+ self.pandera_schemas = {table.name: table.schema for table in self.database_definition.tables}
+
+ def read_data(self, table_name: str) -> pl.LazyFrame | None:
+ """
+ Reads data from a Delta table into a Polars LazyFrame.
+
+ Args:
+ table_name: The name of the table to read.
+
+ Returns:
+ A Polars LazyFrame containing the table data, or None if the table is empty.
+ """
+ path = self._get_table_path(table_name)
+
+ df = pl.scan_delta(path, storage_options=self.storage_options)
+
+ return df
+
+ def overwrite_table(self, table_name: str, data: pl.DataFrame | pl.LazyFrame, overwrite_schema: bool = False):
+ span = trace.get_current_span()
+
+ if isinstance(data, pl.LazyFrame):
+ data = data.collect()
+
+ span.set_attributes(
+ {
+ "row_count": data.height,
+ "database_name": self.database_definition.name,
+ "table_name": table_name,
+ }
+ )
+
+ self.ensure_initialised()
+
+ path = self._get_table_path(table_name)
+
+ schema = self.pandera_schemas[table_name]
+
+ schema.validate(data, lazy=False)
+
+ data.write_delta(
+ path,
+ mode="overwrite",
+            delta_write_options={"schema_mode": "overwrite"} if overwrite_schema else None,
+ storage_options=self.storage_options,
+ )
+
+ def overwrite_table_lazy(self, table_name: str, data: pl.LazyFrame, overwrite_schema: bool = False):
+ span = trace.get_current_span()
+
+ span.set_attributes(
+ {
+ "database_name": self.database_definition.name,
+ "table_name": table_name,
+ }
+ )
+
+ self.ensure_initialised()
+
+ path = self._get_table_path(table_name)
+
+ schema = self.pandera_schemas[table_name]
+
+ schema.validate(data, lazy=False)
+
+ if overwrite_schema:
+ delta_write_options = {"schema_mode": "overwrite"}
+ else:
+ delta_write_options = None
+
+ data.sink_delta(
+ path,
+ mode="overwrite",
+ delta_write_options=delta_write_options,
+ storage_options=self.storage_options,
+ )
+
+ def overwrite_table_with_condition(
+ self, table_name: str, data: pl.DataFrame | pl.LazyFrame, predicate: str, overwrite_schema: bool = False
+ ):
+ span = trace.get_current_span()
+
+ if isinstance(data, pl.LazyFrame):
+ data = data.collect()
+
+ span.set_attributes(
+ {
+ "row_count": data.height,
+ "database_name": self.database_definition.name,
+ "table_name": table_name,
+ "predicate": predicate,
+ }
+ )
+
+ self.ensure_initialised()
+
+ schema = self.pandera_schemas[table_name]
+
+ schema.validate(data, lazy=False)
+
+ path = self._get_table_path(table_name)
+
+ write_deltalake(
+ path,
+ data.to_arrow(), # type: ignore
+ mode="overwrite",
+ predicate=predicate,
+            # deltalake 1.x removed `overwrite_schema`; `schema_mode` replaces it
+            schema_mode="overwrite" if overwrite_schema else None,
+ storage_options=self.storage_options,
+ )
+
+ def append_to_table(self, table_name: str, data: pl.DataFrame | pl.LazyFrame):
+ span = trace.get_current_span()
+
+ if isinstance(data, pl.LazyFrame):
+ data = data.collect()
+
+ span.set_attributes(
+ {
+ "row_count": data.height,
+ "database_name": self.database_definition.name,
+ "table_name": table_name,
+ }
+ )
+
+ self.ensure_initialised()
+
+ self.pandera_schemas[table_name].validate(data, lazy=False)
+
+ path = self._get_table_path(table_name)
+
+ data.write_delta(
+ path,
+ mode="append",
+ storage_options=self.storage_options,
+ )
+
+ def ensure_all_rows_match(self, data: pl.DataFrame, column_name: str, value: str):
+ span = trace.get_current_span()
+ span.set_attributes({"column_name": column_name, "value": value})
+
+ if data.filter(pl.col(column_name) != value).height > 0:
+ raise ValueError(f"Column '{column_name}' must have value '{value}' for all rows")
+
+ def ensure_initialised(self):
+ if not self.initialised:
+ self.initialise_database()
+ self.initialised = True
+
+ def initialise_database(self):
+ database_location = self.storage_configuration.get_full_path(
+ self.data_lake_layer,
+ f"{self.base_path}/{self.database_definition.name}",
+ )
+
+ span = trace.get_current_span()
+
+ if (self.database_definition.tables is None) or (len(self.database_definition.tables) == 0):
+ span.set_attribute("initialisation_required", False)
+ return
+
+ span.set_attributes(
+ {
+ "initialisation_required": True,
+ "database_location": database_location,
+ "database_name": self.database_definition.name,
+ }
+ )
+
+ for table in self.database_definition.tables:
+ self.initialise_table(table)
+
+ def initialise_table(self, table: TableDefinition):
+ span = trace.get_current_span()
+ span.set_attributes({"database_name": self.database_definition.name, "table_name": table.name})
+
+ table_path = self._get_table_path(table.name)
+
+ try:
+ DeltaTable(table_path, storage_options=self.storage_options)
+ table_exists = True
+ except Exception:
+ table_exists = False
+
+ if not table_exists:
+ schema = table.schema
+ _: DeltaTable = DeltaTable.create(
+ table_path,
+ schema=pandera_polars_to_deltalake_schema(schema),
+ storage_options=self.storage_options,
+ )
+
+ def _get_table_path(self, table_name: str) -> str:
+ return self.storage_configuration.get_full_path(
+ self.data_lake_layer,
+ f"{self.base_path}/{self.database_definition.name}/{table_name}",
+ )
diff --git a/src/corvus_python/repositories/polars_excel_data_repository.py b/src/corvus_python/repositories/polars_excel_data_repository.py
new file mode 100644
index 0000000..e3be5c6
--- /dev/null
+++ b/src/corvus_python/repositories/polars_excel_data_repository.py
@@ -0,0 +1,67 @@
+import logging
+from typing import BinaryIO, cast
+from io import BytesIO
+import polars as pl
+import fsspec
+from opentelemetry import trace
+
+
+from ..storage import StorageConfiguration, DataLakeLayer
+from ..monitoring import all_methods_start_new_current_span_with_method_name
+
+tracer = trace.get_tracer(__name__)
+
+
+@all_methods_start_new_current_span_with_method_name(tracer)
+class PolarsExcelDataRepository:
+ def __init__(self, file_system_configuration: StorageConfiguration, data_lake_layer: DataLakeLayer, base_path: str):
+
+ self.file_system_configuration = file_system_configuration
+ self.base_path = base_path
+ self.data_lake_layer = data_lake_layer
+ self.logger = logging.getLogger(__name__)
+
+ def load_excel(
+ self, snapshot_timestamp: str, workbook_name: str, relative_path: str | None = None
+ ) -> dict[str, pl.DataFrame]:
+
+ self.logger.info("load_excel - Workbook name: %s, Snapshot timestamp: %s", workbook_name, snapshot_timestamp)
+
+ path = self.get_file_path(workbook_name, snapshot_timestamp, relative_path)
+
+ self.logger.info("load_excel - Target file path: %s", path)
+
+ if (
+ self.file_system_configuration.storage_options
+ and self.file_system_configuration.storage_options.get("azure_storage_account_name", None) is not None
+ ):
+ self.logger.info(
+ "load_excel - Using Azure storage account: %s",
+ self.file_system_configuration.storage_options["azure_storage_account_name"],
+ )
+
+ storage_options = {
+ "azure_storage_account_name": self.file_system_configuration.storage_options[
+ "azure_storage_account_name"
+ ],
+ "anon": False,
+ }
+ else:
+ storage_options = self.file_system_configuration.storage_options or {}
+
+ with fsspec.open(path, **storage_options) as f:
+ f = cast(BinaryIO, f)
+ workbook_bytes = f.read()
+
+ worksheets = pl.read_excel(BytesIO(workbook_bytes), sheet_id=0, engine="calamine")
+
+ return worksheets
+
+ def get_file_path(self, workbook_name: str, snapshot_timestamp: str, relative_path: str | None = None) -> str:
+ base_path = self.base_path if relative_path is None else f"{self.base_path}/{relative_path}"
+
+ path = self.file_system_configuration.get_full_path(
+ self.data_lake_layer, f"{base_path}/snapshot_time={snapshot_timestamp}/{workbook_name}.xlsx"
+ )
+
+ return path
diff --git a/src/corvus_python/repositories/polars_ndjson_data_repository.py b/src/corvus_python/repositories/polars_ndjson_data_repository.py
new file mode 100644
index 0000000..2acae93
--- /dev/null
+++ b/src/corvus_python/repositories/polars_ndjson_data_repository.py
@@ -0,0 +1,65 @@
+import polars as pl
+import logging
+from opentelemetry import trace
+
+from ..storage import StorageConfiguration, DataLakeLayer
+from ..monitoring import all_methods_start_new_current_span_with_method_name
+
+tracer = trace.get_tracer(__name__)
+
+
+@all_methods_start_new_current_span_with_method_name(tracer)
+class PolarsNdJsonDataRepository:
+ def __init__(
+ self,
+ file_system_configuration: StorageConfiguration,
+ data_lake_layer: DataLakeLayer,
+ base_path: str,
+ ):
+
+ self.file_system_configuration = file_system_configuration
+ self.base_path = base_path
+ self.data_lake_layer = data_lake_layer
+ self.logger = logging.getLogger(__name__)
+
+ def load_ndjson(
+ self,
+ object_name: str,
+ load_type: str,
+ snapshot_timestamp: str,
+ include_file_paths: str | None = None,
+ schema_overrides: dict[str, pl.DataType] | None = None,
+ schema: dict[str, pl.DataType] | None = None,
+ ) -> pl.DataFrame:
+
+ self.logger.info(
+ "load_ndjson - Object name: %s, Type: %s, Snapshot timestamp: %s",
+ object_name,
+ load_type,
+ snapshot_timestamp,
+ )
+
+ # If object name has .json suffix, strip it off
+ if object_name.endswith(".json"):
+ object_name = object_name[:-5]
+
+ path = self._get_json_file_path(object_name, load_type, snapshot_timestamp)
+
+ self.logger.info("load_ndjson - Target file path: %s", path)
+
+ return pl.scan_ndjson(
+ path,
+ storage_options=self.file_system_configuration.storage_options,
+ include_file_paths=include_file_paths,
+ schema_overrides=schema_overrides,
+ infer_schema_length=None,
+ schema=schema,
+ ).collect()
+
+ def _get_json_file_path(self, object_name: str, load_type: str, snapshot_timestamp: str) -> str:
+ path = self.file_system_configuration.get_full_path(
+ self.data_lake_layer,
+ f"{self.base_path}/{load_type}/snapshot_time={snapshot_timestamp}/{object_name}.json",
+ )
+
+ return path
diff --git a/src/corvus_python/schema/__init__.py b/src/corvus_python/schema/__init__.py
new file mode 100644
index 0000000..b58e846
--- /dev/null
+++ b/src/corvus_python/schema/__init__.py
@@ -0,0 +1,2 @@
+from .pandera_to_deltalake_schema import pandera_polars_to_deltalake_schema # noqa: F401
+from .pandera_to_polars_schema import pandera_to_polars_schema # noqa: F401
diff --git a/src/corvus_python/schema/pandera_to_deltalake_schema.py b/src/corvus_python/schema/pandera_to_deltalake_schema.py
new file mode 100644
index 0000000..a1f446e
--- /dev/null
+++ b/src/corvus_python/schema/pandera_to_deltalake_schema.py
@@ -0,0 +1,62 @@
+import pandera.polars as pa
+from deltalake.schema import Field as DeltaField
+from deltalake.schema import PrimitiveType, Schema
+
+
+def pandera_polars_to_deltalake_schema(schema: pa.DataFrameSchema) -> Schema:
+ """Converts a Pandera Polars schema to a Delta Lake schema.
+
+ Args:
+ schema: A Pandera Polars schema object.
+
+ Returns:
+ Schema: A Delta Lake schema object representing the schema.
+
+ Raises:
+ ValueError: If a field type is not supported.
+ """
+ # Mapping of Polars types to Delta Lake PrimitiveType
+ type_mapping: dict[str, PrimitiveType] = {
+ "Utf8": PrimitiveType("string"),
+ "String": PrimitiveType("string"),
+ "Int64": PrimitiveType("long"),
+ "Int32": PrimitiveType("integer"),
+ "Float64": PrimitiveType("double"),
+ "Float32": PrimitiveType("float"),
+ "Boolean": PrimitiveType("boolean"),
+ "Date": PrimitiveType("date"),
+ # Note: Datetime types are handled separately below with startswith check
+ }
+
+ delta_fields: list[DeltaField] = []
+
+ for col in schema.columns.values():
+ column: pa.Column = col
+
+ field_name = column.name
+ polars_type = column.dtype
+
+ # Get the Delta Lake type
+ polars_type_str = str(polars_type)
+
+ if polars_type_str.startswith("Datetime"):
+ # All datetime types map to Delta Lake timestamp
+ delta_type = PrimitiveType("timestamp")
+ elif polars_type_str.startswith("Decimal"):
+ delta_type = PrimitiveType("double")
+ else:
+ delta_type: PrimitiveType | None = type_mapping.get(polars_type_str)
+ if delta_type is None:
+ raise ValueError(f"Unsupported type: {polars_type_str}")
+
+ # Create the Delta Lake field
+ delta_field: DeltaField = DeltaField(
+ name=field_name,
+ type=delta_type,
+ nullable=col.nullable,
+ metadata={},
+ )
+
+ delta_fields.append(delta_field)
+
+ return Schema(delta_fields)
diff --git a/src/corvus_python/schema/pandera_to_polars_schema.py b/src/corvus_python/schema/pandera_to_polars_schema.py
new file mode 100644
index 0000000..ddaaee9
--- /dev/null
+++ b/src/corvus_python/schema/pandera_to_polars_schema.py
@@ -0,0 +1,100 @@
+import polars as pl
+from pandera.polars import DataFrameModel
+from polars import Date, Float64, Int64, String, Datetime, Boolean, Decimal
+from typing import Type, Set, Dict, Any
+
+
+def pandera_to_polars_schema(cls: Type[DataFrameModel]) -> pl.Schema:
+ """
+ Generates a Polars Schema from a DataFrameModel class and its base classes, using their annotations.
+
+ This function iterates over the Method Resolution Order (MRO) of the provided class. It examines each base class
+ that is a subclass of DataFrameModel (excluding DataFrameModel itself). For each, it extracts annotated fields,
+ maps them to Polars DataTypes, and creates a Field for each. The function ensures uniqueness of each field in
+ the resulting schema.
+
+ Parameters
+ ----------
+ cls : DataFrameModel
+ The class inheriting from DataFrameModel for which the Polars schema is to be generated.
+
+ Returns
+ -------
+ pl.Schema
+ A Polars Schema with Fields for each annotated field in the class hierarchy.
+
+ Notes
+ -----
+ The function uses guard clauses for reduced nesting and improved readability.
+
+ Examples
+ --------
+ >>> import pandera as pa
+ >>> import polars as pl
+ >>> class MyDataFrameModel(pa.DataFrameModel):
+ ... name: pa.typing.Series[str]
+ ... age: pa.typing.Series[int]
+ >>> schema = pandera_to_polars_schema(MyDataFrameModel)
+ >>> print(schema)
+ {'name': String, 'age': Int64}
+ """
+ schema_dict: Dict[str, Any] = {}
+ added_fields: Set[str] = set()
+
+ type_mapping: dict = {
+ str: String,
+ "String": String,
+ int: Int64,
+ float: Float64,
+ bool: Boolean,
+ "Timestamp": Datetime("us"),
+ "datetime": Datetime("us"),
+ "Datetime": Datetime("us"),
+ "Date": Date,
+ "date": Date,
+ "Decimal": Decimal,
+ "decimal": Decimal,
+ # Add more mappings as needed for other pandera types like Categorical, etc.
+ }
+
+ for base in cls.__mro__:
+ if not issubclass(base, DataFrameModel) or base == DataFrameModel:
+ continue
+
+ for attr_name, attr_type in base.__annotations__.items():
+ # Extract the inner type from pandera.typing.Series (e.g., Series[str] -> str)
+ if hasattr(attr_type, "__args__") and attr_type.__args__:
+ inner_type = attr_type.__args__[0]
+ else:
+ inner_type = attr_type
+
+ # Determine the key for type_mapping lookup
+ # For built-in types (str, int, float, bool), inner_type itself is the key
+ # For types like pandera.typing.Timestamp, its __name__ might be 'Timestamp'
+ type_key = inner_type.__name__ if hasattr(inner_type, "__name__") else inner_type
+
+ polars_type = type_mapping.get(type_key, None)
+
+ if attr_name in added_fields or polars_type is None:
+ # If type is not directly mapped, try to infer from common Python types
+ if inner_type is str:
+ polars_type = String
+ elif inner_type is int:
+ polars_type = Int64
+ elif inner_type is float:
+ polars_type = Float64
+ elif inner_type is bool:
+ polars_type = Boolean
+ elif isinstance(inner_type, type) and issubclass(inner_type, (type(None),)): # Handle Optional types
+ continue
+ else:
+ print(f"Warning: No direct Polars mapping for Pandera type: {inner_type}")
+ polars_type = String
+
+ if attr_name in added_fields: # Skip if already added from a base class
+ continue
+
+ schema_dict[attr_name] = polars_type
+ added_fields.add(attr_name)
+
+ return pl.Schema(schema_dict)
diff --git a/src/corvus_python/sharepoint/sharepoint_utils.py b/src/corvus_python/sharepoint/sharepoint_utils.py
index 2a6a5cd..d52e030 100644
--- a/src/corvus_python/sharepoint/sharepoint_utils.py
+++ b/src/corvus_python/sharepoint/sharepoint_utils.py
@@ -57,6 +57,76 @@ def retrieve_image_as_base64(
return base64_image
+ @staticmethod
+ def assign_item_permissions(
+ sharepoint_tenant_fqdn: str,
+ sharepoint_site_name: str,
+ library_name: str,
+ item_path: str,
+ token: str,
+ recipients: list[dict[str, Any]],
+ write_permission: bool = False,
+ ) -> dict[str, Any]:
+ """Assigns permissions on an item in SharePoint.
+ See https://learn.microsoft.com/en-us/graph/api/driveitem-invite.
+
+ Args:
+ sharepoint_tenant_fqdn (str): FQDN of the SharePoint tenant to save the file to.
+ sharepoint_site_name (str): Name of the SharePoint site to save the file to.
+ library_name (str): Name of the library to save the file to (URL-encoded).
+ item_path (str): Full item path (relative to root folder) to use when saving. Don't start with slash.
+ token (str): Bearer token for the request.
+ recipients: Array of driveRecipient objects.
+ See https://learn.microsoft.com/en-us/graph/api/resources/driverecipient
+ write_permission (bool): Boolean indicating whether to assign write permission
+
+ Returns:
+ dict: Response as JSON.
+ """
+ headers = {
+ "Accept": "application/json",
+ "Content-Type": "application/json; charset=utf-8",
+ "Authorization": f"Bearer {token}",
+ }
+ roles = ["write"] if write_permission else ["read"]
+ body = {"requireSignIn": True, "sendInvitation": False, "roles": roles, "recipients": recipients}
+ drive_id = SharePointUtilities.get_drive_id(sharepoint_tenant_fqdn, sharepoint_site_name, library_name, headers)
+ url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/root:/{item_path}:/invite"
+ response = requests.post(url, headers=headers, json=body)
+ response.raise_for_status()
+ return response.json()
+
+ @staticmethod
+ def rename_file(
+ sharepoint_tenant_fqdn: str,
+ sharepoint_site_name: str,
+ library_name: str,
+ existing_file_path: str,
+ token: str,
+ new_file_name: str,
+ ) -> None:
+ """Renames a file in SharePoint.
+
+ Args:
+ sharepoint_tenant_fqdn (str): FQDN of the SharePoint tenant to save the file to.
+ sharepoint_site_name (str): Name of the SharePoint site to save the file to.
+ library_name (str): Name of the library to save the file to (URL-encoded).
+ existing_file_path (str): Full file path (relative to root folder) of the file to rename.
+ Don't start with slash.
+ token (str): Bearer token for the request.
+ new_file_name (str): The new file name (including extension).
+ """
+ headers = {
+ "Accept": "application/json",
+ "Content-Type": "application/json; charset=utf-8",
+ "Authorization": f"Bearer {token}",
+ }
+ body = {"name": new_file_name}
+ drive_id = SharePointUtilities.get_drive_id(sharepoint_tenant_fqdn, sharepoint_site_name, library_name, headers)
+ url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/root:/{existing_file_path}"
+ response = requests.patch(url, headers=headers, json=body)
+ response.raise_for_status()
+
@staticmethod
def get_sharepoint_path_segments(sharepoint_url: str):
"""
diff --git a/src/corvus_python/storage/azure_blob_file_storage.py b/src/corvus_python/storage/azure_blob_file_storage.py
new file mode 100644
index 0000000..94540ba
--- /dev/null
+++ b/src/corvus_python/storage/azure_blob_file_storage.py
@@ -0,0 +1,105 @@
+import os
+from io import BytesIO
+
+from azure.core.credentials import (
+ AzureNamedKeyCredential,
+ AzureSasCredential,
+ TokenCredential,
+)
+from azure.core.paging import ItemPaged
+from azure.storage.blob import BlobPrefix, ContainerClient
+from azure.storage.blob._blob_client import BlobClient
+from opentelemetry import trace
+
+from ..monitoring import (
+ add_attributes_to_current_span,
+ all_methods_start_new_current_span_with_method_name,
+ start_as_current_span_with_method_name,
+)
+from ..storage.file_storage import FileStorage
+
+tracer = trace.get_tracer(__name__)
+
+
@all_methods_start_new_current_span_with_method_name(tracer)
class AzureBlobFileStorage(FileStorage):
    """FileStorage implementation backed by an Azure Blob Storage container."""

    def __init__(self, container_client: ContainerClient):
        """Args:
            container_client (ContainerClient): Client for the target blob container.
        """
        super().__init__()
        self._container_client = container_client

    def get_file_bytes(self, filename: str) -> BytesIO:
        """Download a blob and return its content as a BytesIO rewound to position 0."""
        blob = self._container_client.get_blob_client(filename)
        downloader = blob.download_blob()
        # Renamed from `bytes` to avoid shadowing the builtin.
        buffer = BytesIO()
        downloader.readinto(buffer)
        buffer.seek(0)
        return buffer

    def get_matching_file_names(self, filename_prefix: str) -> list[str]:
        """Return blob names starting with `filename_prefix`, restricted to the
        same virtual folder as the prefix (blobs in subfolders are excluded)."""
        add_attributes_to_current_span(filename_prefix=filename_prefix)

        folder_path = os.path.dirname(filename_prefix)

        matching_files: ItemPaged[str] = self._container_client.list_blob_names(name_starts_with=filename_prefix)

        return [file for file in matching_files if os.path.dirname(file) == folder_path]

    def get_single_matching_file_name(self, filename_prefix: str) -> str:
        """Return the single blob name matching the prefix.

        Raises:
            FileNotFoundError: If no blob matches.
            ValueError: If more than one blob matches.
        """
        return self._get_single_matching_name_for_file_prefix(filename_prefix)

    def get_single_matching_file_bytes(self, filename_prefix: str) -> BytesIO:
        """Return the bytes of the single blob matching the prefix."""
        full_name = self._get_single_matching_name_for_file_prefix(filename_prefix)
        return self.get_file_bytes(full_name)

    def get_latest_matching_file_name(self, filename_prefix: str) -> str:
        """Return the lexicographically greatest matching blob name (treated as latest).

        Raises:
            FileNotFoundError: If no blob matches the prefix.
        """
        matching_files: list[str] = self.get_matching_file_names(filename_prefix)
        if not matching_files:
            raise FileNotFoundError(f"No files found with prefix '{filename_prefix}'")
        latest_file: str = max(matching_files)
        add_attributes_to_current_span(latest_file=latest_file)
        return latest_file

    def get_latest_matching_file_bytes(self, filename_prefix: str) -> BytesIO:
        """Return the bytes of the latest (max-named) blob matching the prefix."""
        latest_file = self.get_latest_matching_file_name(filename_prefix)
        return self.get_file_bytes(latest_file)

    def write_file(self, file_name: str, file_bytes: bytes) -> None:
        """Upload `file_bytes` to `file_name`, overwriting any existing blob."""
        blob: BlobClient = self._container_client.get_blob_client(file_name)
        blob.upload_blob(file_bytes, overwrite=True)

    def list_subfolders(self, folder_path: str) -> list[str]:
        """Return the names of the immediate virtual subfolders of `folder_path`."""
        if folder_path and not folder_path.endswith("/"):
            folder_path = folder_path + "/"
        # walk_blobs with a delimiter yields BlobPrefix entries for virtual folders
        # and BlobProperties for plain blobs; only the former are kept.
        blobs = self._container_client.walk_blobs(name_starts_with=folder_path, delimiter="/")
        return [prefix.name.rstrip("/").split("/")[-1] for prefix in blobs if isinstance(prefix, BlobPrefix)]

    def _get_single_matching_name_for_file_prefix(self, filename_prefix: str) -> str:
        """Resolve a prefix expected to match exactly one blob; raise otherwise."""
        matching_files = self.get_matching_file_names(filename_prefix)

        if len(matching_files) == 0:
            raise FileNotFoundError(f"No files found with prefix '{filename_prefix}'")

        if len(matching_files) > 1:
            raise ValueError(
                f"Multiple files found with prefix '{filename_prefix}'. "
                f"Found {len(matching_files)}, expected only 1."
            )

        return matching_files[0]
+
+
@start_as_current_span_with_method_name(tracer)
def build_azure_blob_container_client(
    credential: str | dict[str, str] | AzureNamedKeyCredential | AzureSasCredential | TokenCredential | None,
    storage_account_name: str,
    container_name: str,
) -> ContainerClient:
    """Build a ContainerClient for the given storage account and container.

    Args:
        credential: Credential accepted by ContainerClient (key, SAS, token credential, or None).
        storage_account_name (str): Name of the Azure storage account.
        container_name (str): Name of the blob container.

    Returns:
        ContainerClient: Client targeting the requested container.
    """
    span = trace.get_current_span()
    span.set_attribute("container_name", container_name)

    account_url = f"https://{storage_account_name}.blob.core.windows.net"
    span.set_attribute("account_url", account_url)

    return ContainerClient(account_url, container_name, credential)
diff --git a/src/corvus_python/storage/file_storage.py b/src/corvus_python/storage/file_storage.py
new file mode 100644
index 0000000..72382d3
--- /dev/null
+++ b/src/corvus_python/storage/file_storage.py
@@ -0,0 +1,31 @@
+from abc import ABC
+from io import BytesIO
+
+
class FileStorage(ABC):
    """Abstract interface for prefix-based file storage backends.

    Concrete implementations (local filesystem, Azure Blob, SharePoint) override
    each method; the base raises NotImplementedError rather than using
    @abstractmethod, so partial implementations can still be instantiated.
    """

    def __init__(self) -> None:
        pass

    def get_file_bytes(self, filename: str) -> BytesIO:
        """Return the content of `filename` as a BytesIO stream."""
        raise NotImplementedError("get_file_bytes method not implemented")

    def get_matching_file_names(self, filename_prefix: str) -> list[str]:
        """Return names of files starting with `filename_prefix` (same folder only)."""
        raise NotImplementedError("get_matching_file_names method not implemented")

    def get_latest_matching_file_name(self, filename_prefix: str) -> str:
        """Return the latest (lexicographically greatest) matching file name."""
        raise NotImplementedError("get_latest_matching_file_name method not implemented")

    def get_latest_matching_file_bytes(self, filename_prefix: str) -> BytesIO:
        """Return the bytes of the latest matching file."""
        raise NotImplementedError("get_latest_matching_file_bytes method not implemented")

    def get_single_matching_file_name(self, filename_prefix: str) -> str:
        """Return the unique matching file name; implementations raise if 0 or >1 match."""
        raise NotImplementedError("get_single_matching_file_name method not implemented")

    def get_single_matching_file_bytes(self, filename_prefix: str) -> BytesIO:
        """Return the bytes of the unique matching file."""
        raise NotImplementedError("get_single_matching_file_bytes method not implemented")

    def write_file(self, file_name: str, file_bytes: bytes) -> None:
        """Write `file_bytes` to `file_name`, overwriting if it exists."""
        raise NotImplementedError("write_file method not implemented")

    def list_subfolders(self, folder_path: str) -> list[str]:
        """Return the names of the immediate subfolders of `folder_path`."""
        raise NotImplementedError("list_subfolders method not implemented")
diff --git a/src/corvus_python/storage/local_file_storage.py b/src/corvus_python/storage/local_file_storage.py
new file mode 100644
index 0000000..f9bbc4c
--- /dev/null
+++ b/src/corvus_python/storage/local_file_storage.py
@@ -0,0 +1,95 @@
+import os
+from io import BytesIO
+
+from opentelemetry import trace
+
+from ..monitoring import (
+ add_attributes_to_current_span,
+ all_methods_start_new_current_span_with_method_name,
+)
+from ..storage.file_storage import FileStorage
+
+tracer = trace.get_tracer(__name__)
+
+
@all_methods_start_new_current_span_with_method_name(tracer)
class LocalFileStorage(FileStorage):
    """FileStorage implementation backed by the local file system, rooted at `base_path`."""

    def __init__(self, base_path: str):
        """Args:
            base_path (str): Directory under which all file names are resolved.
        """
        super().__init__()
        self._base_path = base_path

    def get_file_bytes(self, filename: str) -> BytesIO:
        """Read `filename` (relative to the base path) into a BytesIO at position 0."""
        path = self._get_file_path(filename)

        with open(path, "rb") as file:
            # Renamed from `bytes` to avoid shadowing the builtin.
            buffer = BytesIO(file.read())
            buffer.seek(0)
            return buffer

    def get_matching_file_names(self, filename_prefix: str) -> list[str]:
        """Return base-path-relative names of files in the prefix's folder whose
        basename starts with the prefix's basename. Subfolders are not searched.

        Note: raises OSError (FileNotFoundError) if the prefix's folder does not exist.
        """
        add_attributes_to_current_span(filename_prefix=filename_prefix)

        full_prefix = os.path.join(self._base_path, filename_prefix)
        parent_folder = os.path.dirname(full_prefix)
        parent_folder_without_prefix = os.path.relpath(parent_folder, self._base_path)

        files_in_target_folder = os.listdir(parent_folder)
        target_file_prefix = os.path.basename(full_prefix)

        return [
            os.path.join(parent_folder_without_prefix, file)
            for file in files_in_target_folder
            if file.startswith(target_file_prefix)
        ]

    def get_latest_matching_file_name(self, filename_prefix: str) -> str:
        """Return the lexicographically greatest matching name (treated as latest).

        Raises:
            FileNotFoundError: If no file matches the prefix.
        """
        add_attributes_to_current_span(filename_prefix=filename_prefix)
        matching_files: list[str] = self.get_matching_file_names(filename_prefix)
        if len(matching_files) == 0:
            raise FileNotFoundError(f"No files found with prefix '{filename_prefix}'")
        latest_file: str = max(matching_files)
        add_attributes_to_current_span(latest_file=latest_file)
        return latest_file

    def get_latest_matching_file_bytes(self, filename_prefix: str) -> BytesIO:
        """Return the bytes of the latest matching file."""
        latest_file = self.get_latest_matching_file_name(filename_prefix)
        return self.get_file_bytes(latest_file)

    def get_single_matching_file_name(self, filename_prefix: str) -> str:
        """Return the single matching file name.

        Raises:
            FileNotFoundError: If no file matches.
            ValueError: If more than one file matches.
        """
        matching_files = self.get_matching_file_names(filename_prefix)

        if len(matching_files) == 0:
            raise FileNotFoundError(f"No files found with prefix '{filename_prefix}'")

        if len(matching_files) > 1:
            raise ValueError(
                f"Multiple files found with prefix '{filename_prefix}'. "
                f"Found {len(matching_files)}, expected only 1."
            )

        return matching_files[0]

    def get_single_matching_file_bytes(self, filename_prefix: str) -> BytesIO:
        """Return the bytes of the single matching file."""
        full_name = self.get_single_matching_file_name(filename_prefix)
        return self.get_file_bytes(full_name)

    def write_file(self, file_name: str, file_bytes: bytes) -> None:
        """Write `file_bytes` to `file_name` under the base path, creating parent folders."""
        path = self._get_file_path(file_name)

        os.makedirs(os.path.dirname(path), exist_ok=True)

        with open(path, "wb") as file:
            file.write(file_bytes)

    def list_subfolders(self, folder_path: str) -> list[str]:
        """Return names of immediate subdirectories of `folder_path` (empty list if absent)."""
        if folder_path.startswith("/"):
            folder_path = folder_path[1:]
        abs_path = os.path.join(self._base_path, folder_path)
        if not os.path.exists(abs_path):
            return []
        return [name for name in os.listdir(abs_path) if os.path.isdir(os.path.join(abs_path, name))]

    def _get_file_path(self, filename: str) -> str:
        """Resolve a storage-relative name to an absolute path under the base path."""
        # NOTE(review): the span attribute is keyed `filename_prefix` although this is a
        # plain filename — kept for telemetry compatibility; consider renaming the key.
        add_attributes_to_current_span(filename_prefix=filename)

        return os.path.join(self._base_path, filename)
diff --git a/src/corvus_python/storage/sharepoint_file_storage.py b/src/corvus_python/storage/sharepoint_file_storage.py
new file mode 100644
index 0000000..4b14873
--- /dev/null
+++ b/src/corvus_python/storage/sharepoint_file_storage.py
@@ -0,0 +1,123 @@
+import os
+from io import BytesIO
+from typing import Any
+
+import requests
+from opentelemetry import trace
+
+from ..sharepoint import SharePointUtilities
+from ..monitoring import (
+ add_attributes_to_current_span,
+ all_methods_start_new_current_span_with_method_name,
+)
+from ..storage.file_storage import FileStorage
+
+tracer = trace.get_tracer(__name__)
+
+
@all_methods_start_new_current_span_with_method_name(tracer)
class SharePointFileStorage(FileStorage):
    """FileStorage implementation backed by a SharePoint document library,
    accessed through Microsoft Graph via SharePointUtilities.

    The drive ID for the target library is resolved once in the constructor
    (this performs a network call).
    """

    def __init__(
        self, sharepoint_tenant_fqdn: str, sharepoint_site_name: str, library_name: str, auth_token: str
    ) -> None:
        # Bearer token and standard Graph JSON headers are reused for all requests.
        super().__init__()
        self._sharepoint_tenant_fqdn = sharepoint_tenant_fqdn
        self._sharepoint_site_name = sharepoint_site_name
        self._library_name = library_name
        self._auth_token = auth_token
        self._headers: dict[str, str] = {
            "Accept": "application/json",
            "Content-Type": "application/json; charset=utf-8",
            "Authorization": f"Bearer {auth_token}",
        }
        # Resolved eagerly; raises if the site/library cannot be found.
        self.drive_id: str = SharePointUtilities.get_drive_id(
            sharepoint_tenant_fqdn=self._sharepoint_tenant_fqdn,
            sharepoint_site_name=self._sharepoint_site_name,
            library_name=self._library_name,
            headers=self._headers,
        )

    def get_file_bytes(self, filename: str) -> BytesIO:
        """Download `filename` via its Graph download URL and return a rewound BytesIO."""
        # The download URL is pre-authenticated, so no auth headers are sent here.
        download_url = self._get_file_path(filename)
        response = requests.get(download_url)
        response.raise_for_status()
        bytes_io = BytesIO(response.content)
        bytes_io.seek(0)
        return bytes_io

    def get_matching_file_names(self, filename_prefix: str) -> list[str]:
        """Return names (folder-qualified) of files in the prefix's folder whose
        name starts with the prefix's basename. Folders are excluded."""
        add_attributes_to_current_span(filename_prefix=filename_prefix)

        # Parse the prefix to extract folder path and file prefix
        folder_path = os.path.dirname(filename_prefix)
        file_prefix = os.path.basename(filename_prefix)

        # Get all files in the folder
        files_in_folder_response = SharePointUtilities.get_download_urls_for_files_in_folder(
            drive_id=self.drive_id,
            folder_name=folder_path if folder_path else "",
            token=self._auth_token,
        )
        # Type assertion: The method returns a list despite its return type annotation
        files_in_folder: list[dict[str, Any]] = files_in_folder_response  # type: ignore

        # Filter files that match the prefix; the "file" facet distinguishes files from folders.
        matching_files = [
            os.path.join(folder_path, str(file["name"])) if folder_path else str(file["name"])
            for file in files_in_folder
            if str(file["name"]).startswith(file_prefix) and "file" in file
        ]

        return matching_files

    def get_latest_matching_file_name(self, filename_prefix: str) -> str:
        """Return the lexicographically greatest matching name (treated as latest).

        Raises:
            FileNotFoundError: If no file matches the prefix.
        """
        matching_files = self.get_matching_file_names(filename_prefix)

        if not matching_files:
            raise FileNotFoundError(f"No files found with prefix '{filename_prefix}'")

        latest_file = max(matching_files)
        add_attributes_to_current_span(latest_file=latest_file)
        return latest_file

    def get_latest_matching_file_bytes(self, filename_prefix: str) -> BytesIO:
        """Return the bytes of the latest matching file."""
        latest_file = self.get_latest_matching_file_name(filename_prefix)
        return self.get_file_bytes(latest_file)

    def get_single_matching_file_name(self, filename_prefix: str) -> str:
        """Return the single matching file name (FileNotFoundError / ValueError otherwise)."""
        return self._get_single_matching_file_name_for_prefix(filename_prefix)

    def get_single_matching_file_bytes(self, filename_prefix: str) -> BytesIO:
        """Return the bytes of the single matching file."""
        file_name = self._get_single_matching_file_name_for_prefix(filename_prefix)
        return self.get_file_bytes(file_name)

    def write_file(self, file_name: str, file_bytes: bytes) -> None:
        """Upload `file_bytes` to `file_name` in the library via SharePointUtilities."""
        SharePointUtilities.save_file(self.drive_id, file_name, self._auth_token, bytearray(file_bytes))

    def list_subfolders(self, folder_path: str) -> list[str]:
        """Return the names of the immediate subfolders of `folder_path`.

        NOTE(review): folder names are derived from the last segment of `webUrl`,
        which is URL-encoded — names with spaces/special characters may come back
        percent-encoded; confirm against callers.
        """
        items = SharePointUtilities.get_items_in_folder(
            drive_id=self.drive_id, folder_path=folder_path, token=self._auth_token
        )
        # Items carrying a "folder" facet are folders; everything else is a file.
        folders: list[dict[str, str]] = [item for item in items if "folder" in item]  # type: ignore
        return [folder["webUrl"].split("/")[-1] for folder in folders]

    def _get_single_matching_file_name_for_prefix(self, filename_prefix: str) -> str:
        """Resolve a prefix expected to match exactly one file; raise otherwise."""
        matching_files = self.get_matching_file_names(filename_prefix)

        if len(matching_files) == 0:
            raise FileNotFoundError(f"No files found with prefix '{filename_prefix}'")

        if len(matching_files) > 1:
            raise ValueError(
                f"Multiple files found with prefix '{filename_prefix}'. "
                f"Found {len(matching_files)}, expected only 1."
            )

        return matching_files[0]

    def _get_file_path(self, filename: str) -> str:
        """Return the Graph download URL for `filename` in this drive."""
        return SharePointUtilities.get_file_download_url(
            drive_id=self.drive_id,
            file_name=filename,
            token=self._auth_token,
        )
diff --git a/src/corvus_python/testing/__init__.py b/src/corvus_python/testing/__init__.py
new file mode 100644
index 0000000..7f0a66d
--- /dev/null
+++ b/src/corvus_python/testing/__init__.py
@@ -0,0 +1,11 @@
# Public API of the testing helpers. All public functions defined in
# polars_testing_utils are re-exported here (previously
# behave_table_to_polars_dataframe and the explicit-schema variant were omitted).
from .polars_testing_utils import (
    behave_table_to_polars_dataframe,
    behave_table_to_polars_dataframe_with_explicit_schema,
    behave_table_to_polars_dataframe_with_inferred_schema,
    behave_table_to_dictionary_by_row,
    compare_polars_dataframes,
)

__all__ = [
    "behave_table_to_polars_dataframe",
    "behave_table_to_polars_dataframe_with_explicit_schema",
    "behave_table_to_polars_dataframe_with_inferred_schema",
    "behave_table_to_dictionary_by_row",
    "compare_polars_dataframes",
]
diff --git a/src/corvus_python/testing/polars_testing_utils.py b/src/corvus_python/testing/polars_testing_utils.py
new file mode 100644
index 0000000..dee66a6
--- /dev/null
+++ b/src/corvus_python/testing/polars_testing_utils.py
@@ -0,0 +1,222 @@
+"""
+Utility functions for testing: Behave table to Polars DataFrame conversions and comparisons.
+"""
+
+from typing import List
+import polars as pl
+import polars.testing as pl_testing
+from behave.model import Table
+from opentelemetry import trace
+
+from ..monitoring import start_as_current_span_with_method_name
+
+tracer = trace.get_tracer(__name__)
+
+
@start_as_current_span_with_method_name(tracer)
def behave_table_to_polars_dataframe(table: Table) -> pl.DataFrame:
    """Convert a Behave table to a Polars DataFrame.

    Dispatches on the first heading: a "column_name:type" heading selects the
    explicit-schema conversion; otherwise the schema is inferred.

    Args:
        table: The Behave table object.
    Returns:
        A Polars DataFrame.
    """
    has_explicit_types = ":" in table.headings[0]
    if has_explicit_types:
        return behave_table_to_polars_dataframe_with_explicit_schema(table)
    return behave_table_to_polars_dataframe_with_inferred_schema(table)
+
+
@start_as_current_span_with_method_name(tracer)
def behave_table_to_polars_dataframe_with_explicit_schema(table: Table) -> pl.DataFrame:
    """Convert a Behave table whose headings are "name:type" into a typed Polars DataFrame.

    All cells are first read as strings; the literal "nan" is mapped to null, then
    each column is cast/parsed according to its declared type (struct/array via JSON,
    date/datetime via string parsing, decimal with explicit precision/scale, etc.).

    Args:
        table: The Behave table object with "field_name:field_type" headings.
    Returns:
        A Polars DataFrame with the declared schema.
    Raises:
        ValueError: If a heading lacks a type, or a column fails to convert.
    """
    cols: List[List[str]] = [h.split(":", 1) for h in table.headings]  # Split only on first colon
    if any(len(c) != 2 for c in cols):
        raise ValueError("field_name:field_type expected in table headings")

    cols_tuples = [(name, field_type) for name, field_type in cols]
    schema = {name: _string_to_polars_type(field_type) for name, field_type in cols_tuples}
    rows: list[dict[str, str]] = [
        {name: cell for (name, _), cell in zip(cols_tuples, row.cells)} for row in table  # type: ignore[arg-type]
    ]

    # An empty table still needs the declared schema; a populated one starts as all-strings.
    if not rows:
        df = pl.DataFrame(schema=schema)
    else:
        df = pl.DataFrame(rows)

    for name, field_type in cols:
        # Map the literal cell value "nan" to null before any type conversion.
        df = df.with_columns(pl.when(pl.col(name) == "nan").then(None).otherwise(pl.col(name)).alias(name))
        try:
            if field_type.lower().startswith("struct<") or field_type.lower().startswith("array<"):
                # Nested types are provided as JSON strings in the table cells.
                dtype = _parse_polars_struct_dtype(field_type)
                df = df.with_columns(pl.col(name).str.json_decode(dtype).alias(name))
            elif field_type.lower() == "date":
                df = df.with_columns(pl.col(name).str.to_date())
            elif field_type.lower().startswith("date("):
                # e.g. "date(yyyy-MM-dd)" — translate Java-style tokens to strftime.
                format_str = (
                    field_type.split("(")[1].strip(")").replace("yyyy", "%Y").replace("MM", "%m").replace("dd", "%d")
                )
                df = df.with_columns(pl.col(name).str.to_date(format=format_str))
            elif field_type.lower() in ["datetime", "timestamp"]:
                # Accept "YYYY-MM-DD HH:MM:SS[.fff]" by normalising the separator to "T";
                # strict=False leaves unparseable values as null.
                df = df.with_columns(
                    pl.col(name)
                    .str.replace(" ", "T", literal=True)
                    .str.strptime(
                        pl.Datetime(time_zone="UTC"),
                        format="%Y-%m-%dT%H:%M:%S%.f",
                        strict=False,
                    )
                )
            elif field_type.lower().startswith("decimal("):
                # e.g. "decimal(10,2)" — extract precision and scale.
                scale_part = field_type.split(",")[1]
                scale = int(scale_part.strip(")"))
                precision_part = field_type.split("(")[1]
                precision = int(precision_part.split(",")[0])
                df = df.with_columns(pl.col(name).cast(pl.Decimal(precision=precision, scale=scale)))
            elif schema[name] == pl.Boolean:
                # Only the exact strings "True"/"False" map to booleans; anything else is null.
                df = df.with_columns(
                    pl.when(pl.col(name) == "True")
                    .then(True)
                    .when(pl.col(name) == "False")
                    .then(False)
                    .otherwise(None)
                    .alias(name)
                    .cast(pl.Boolean)
                )
            elif schema[name] == pl.Decimal:
                # Bare "decimal" (no precision) defaults to scale 2.
                df = df.with_columns(pl.col(name).cast(pl.Decimal(scale=2)))
            elif schema[name] != pl.Object:
                df = df.with_columns(pl.col(name).cast(schema[name]))
        except Exception as e:
            raise ValueError(f"Error converting column {name} to type {field_type}: {e}")
    return df
+
+
@start_as_current_span_with_method_name(tracer)
def behave_table_to_polars_dataframe_with_inferred_schema(table: Table) -> pl.DataFrame:
    """Convert a Behave table to a Polars DataFrame, letting Polars infer the schema.

    Empty string cells are treated as nulls.

    Args:
        table: The Behave table object.
    Returns:
        A Polars DataFrame with inferred schema.
    """
    headings = table.headings
    records = []
    for row in table:
        # Empty cells become None so Polars infers nullable columns.
        records.append(
            {headings[i]: (cell if cell != "" else None) for i, cell in enumerate(row.cells)}  # type: ignore[list-item]
        )
    return pl.DataFrame(records)
+
+
@start_as_current_span_with_method_name(tracer)
def behave_table_to_dictionary_by_row(table: Table) -> dict[str, str]:
    """Build a dict from a two-column Behave table.

    Column 0 supplies the keys and column 1 the values; later rows with a
    duplicate key overwrite earlier ones.

    Args:
        table: The Behave table object (expected to have two columns).
    Returns:
        A dictionary mapping first-column cells to second-column cells.
    """
    return dict((row.cells[0], row.cells[1]) for row in table)
+
+
@start_as_current_span_with_method_name(tracer)
def compare_polars_dataframes(
    expected: pl.DataFrame,
    actual: pl.DataFrame,
    check_like: bool = True,
    check_row_order: bool = True,
    ignore_missing_columns: bool = False,
):
    """
    Compares two Polars DataFrames for equality.
    Args:
        expected: The expected Polars DataFrame.
        actual: The actual Polars DataFrame.
        check_like: If True, column order will not be checked.
        check_row_order: If True, row order will be checked.
        ignore_missing_columns: If True, columns that are present in expected but missing in actual
            are ignored, and extra columns in actual are dropped.
    Raises:
        AssertionError: If the DataFrames differ.
    """

    if ignore_missing_columns:
        # Compare only the expected columns that actually exist in `actual`.
        # Previously this was `actual.select(expected.columns)`, which raised
        # ColumnNotFoundError for columns missing from `actual`, contradicting
        # the documented "ignore" behavior; when all expected columns are
        # present the result is unchanged.
        shared_columns = [col for col in expected.columns if col in actual.columns]
        expected = expected.select(shared_columns)
        actual = actual.select(shared_columns)

    pl_testing.assert_frame_equal(
        expected,
        actual,
        check_row_order=check_row_order,
        check_column_order=not check_like,
    )
+
+
def _split_type_args(s: str) -> list[str]:
    """Split a comma-separated type-argument list, ignoring commas nested inside <...>."""
    parts: list[str] = []
    depth = 0
    segment_start = 0
    for index, ch in enumerate(s):
        if ch == "<":
            depth += 1
        elif ch == ">":
            depth -= 1
        elif ch == "," and depth == 0:
            # Top-level comma: close off the current segment.
            parts.append(s[segment_start:index].strip())
            segment_start = index + 1
    tail = s[segment_start:]
    if tail:
        parts.append(tail.strip())
    return parts
+
+
def _parse_polars_struct_dtype(type_str: str) -> pl.DataType:
    """Recursively parse a type string such as 'struct<name:string,age:integer>' or
    'array<struct<name:string>>' into a Polars DataType.

    Plain (non-nested) type names fall through to _string_to_polars_type.
    """
    type_str = type_str.strip()
    lower = type_str.lower()
    if lower.startswith("struct<") and lower.endswith(">"):
        # Strip the leading "struct<" and trailing ">", then parse "name:type" fields.
        inner = type_str[7:-1]
        struct_fields: dict[str, pl.DataType] = {}
        for field in _split_type_args(inner):
            field_name, field_type = field.split(":", 1)
            struct_fields[field_name.strip()] = _parse_polars_struct_dtype(field_type.strip())
        return pl.Struct(struct_fields)
    if lower.startswith("array<") and lower.endswith(">"):
        # Strip the leading "array<" and trailing ">"; element type may itself be nested.
        inner = type_str[6:-1]
        return pl.List(_parse_polars_struct_dtype(inner))
    return _string_to_polars_type(type_str)
+
+
def _string_to_polars_type(type_name: str) -> pl.DataType:
    """
    Converts a string representation of a type to a Polars DataType.
    Args:
        type_name: The string name of the type (e.g., "integer", "string", "date").
    Returns:
        The corresponding Polars DataType; unknown names default to Utf8.
    """
    type_name_lower = type_name.lower()
    # "date" and formatted variants like "date(yyyy-MM-dd)" map to Date, but the
    # previous bare startswith("date") check also captured "datetime", wrongly
    # mapping it to Date; "datetime" now maps to Datetime like "timestamp".
    if type_name_lower == "date" or type_name_lower.startswith("date("):
        return pl.Date()
    type_map: dict[str, pl.DataType] = {
        "integer": pl.Int64(),
        "long": pl.Int64(),
        "integer8": pl.Int8(),
        "integer32": pl.Int32(),
        "float": pl.Float64(),
        "double": pl.Float64(),
        "boolean": pl.Boolean(),
        "datetime": pl.Datetime(time_zone="UTC"),
        "timestamp": pl.Datetime(time_zone="UTC"),
        "string": pl.Utf8(),
        "object": pl.Object(),
        "decimal": pl.Decimal(),
    }
    return type_map.get(type_name_lower, pl.Utf8())
diff --git a/tests/unit/test_azure_blob_file_storage.py b/tests/unit/test_azure_blob_file_storage.py
new file mode 100644
index 0000000..c295aa4
--- /dev/null
+++ b/tests/unit/test_azure_blob_file_storage.py
@@ -0,0 +1,328 @@
+import pytest
+from io import BytesIO
+from unittest.mock import MagicMock
+from azure.storage.blob import BlobPrefix
+from corvus_python.storage.azure_blob_file_storage import AzureBlobFileStorage
+
+
@pytest.fixture
def mock_container_client():
    """Mocked azure ContainerClient to inject into the storage class."""
    client = MagicMock()
    return client
+
+
@pytest.fixture
def storage(mock_container_client):
    """AzureBlobFileStorage instance wired to the mocked container client."""
    instance = AzureBlobFileStorage(mock_container_client)
    return instance
+
+
+# region Tests for get_file_bytes
+
+
def test_get_file_bytes_success(storage, mock_container_client):
    """Test reading file bytes from a blob."""
    blob_name = "logs/app_001.log"
    payload = b"Log entry 001\nStarted at 2026-03-11 10:00:00"

    downloader = MagicMock()

    def fake_readinto(stream):
        stream.write(payload)
        return len(payload)

    downloader.readinto.side_effect = fake_readinto
    blob_client = MagicMock()
    blob_client.download_blob.return_value = downloader
    mock_container_client.get_blob_client.return_value = blob_client

    result = storage.get_file_bytes(blob_name)

    assert isinstance(result, BytesIO)
    assert result.tell() == 0  # Stream should be rewound before returning
    assert result.read() == payload
    mock_container_client.get_blob_client.assert_called_once_with(blob_name)
+
+
def test_get_file_bytes_stream_position(storage, mock_container_client):
    """Test that returned BytesIO is positioned at the start."""
    payload = b"col1,col2,col3"

    downloader = MagicMock()

    def fake_readinto(stream):
        stream.write(payload)
        return len(payload)

    downloader.readinto.side_effect = fake_readinto
    blob_client = MagicMock()
    blob_client.download_blob.return_value = downloader
    mock_container_client.get_blob_client.return_value = blob_client

    result = storage.get_file_bytes("data/file.csv")

    assert result.tell() == 0
    assert result.read(1) == b"c"  # Reading starts at the first byte
+
+
+# endregion
+
+# region Tests for get_matching_file_names
+
+
def test_get_matching_file_names_multiple_matches(storage, mock_container_client):
    """Test finding multiple blobs matching a prefix."""
    prefix = "logs/app_"
    blob_names = ["logs/app_001.log", "logs/app_002.log", "logs/app_003.log"]
    mock_container_client.list_blob_names.return_value = iter(blob_names)

    results = storage.get_matching_file_names(prefix)

    assert len(results) == 3
    for expected_name in blob_names:
        assert expected_name in results
    mock_container_client.list_blob_names.assert_called_once_with(name_starts_with=prefix)
+
+
def test_get_matching_file_names_folder_scoped(storage, mock_container_client):
    """Test that matching is scoped to the folder."""
    # The archive/ blob shares the prefix but lives in a subfolder and must be dropped.
    listed = ["logs/app_001.log", "logs/app_002.log", "logs/archive/app_old.log"]
    mock_container_client.list_blob_names.return_value = iter(listed)

    results = storage.get_matching_file_names("logs/app_")

    assert len(results) == 2
    assert "logs/app_001.log" in results
    assert "logs/app_002.log" in results
    assert "logs/archive/app_old.log" not in results
+
+
def test_get_matching_file_names_no_matches(storage, mock_container_client):
    """Test that empty list is returned when no files match."""
    mock_container_client.list_blob_names.return_value = iter([])

    results = storage.get_matching_file_names("logs/nonexistent_")

    assert isinstance(results, list)
    assert results == []
+
+
+# endregion
+
+# region Tests for get_latest_matching_file_name
+
+
def test_get_latest_matching_file_name_success(storage, mock_container_client):
    """Test getting the latest matching blob."""
    # Deliberately out of order; the lexicographic maximum is treated as latest.
    unsorted_names = ["logs/app_001.log", "logs/app_003.log", "logs/app_002.log"]
    mock_container_client.list_blob_names.return_value = iter(unsorted_names)

    assert storage.get_latest_matching_file_name("logs/app_") == "logs/app_003.log"
+
+
def test_get_latest_matching_file_name_no_matches(storage, mock_container_client):
    """Test that FileNotFoundError is raised when no files match."""
    mock_container_client.list_blob_names.return_value = iter([])

    with pytest.raises(FileNotFoundError, match="No files found"):
        storage.get_latest_matching_file_name("logs/nonexistent_")
+
+
+# endregion
+
+# region Tests for get_latest_matching_file_bytes
+
+
def test_get_latest_matching_file_bytes_success(storage, mock_container_client):
    """Test getting bytes of the latest matching blob."""
    payload = b"Log entry 003"
    mock_container_client.list_blob_names.return_value = iter(
        ["logs/app_001.log", "logs/app_003.log"]
    )

    downloader = MagicMock()

    def fake_readinto(stream):
        stream.write(payload)
        return len(payload)

    downloader.readinto.side_effect = fake_readinto
    blob_client = MagicMock()
    blob_client.download_blob.return_value = downloader
    mock_container_client.get_blob_client.return_value = blob_client

    result = storage.get_latest_matching_file_bytes("logs/app_")

    assert isinstance(result, BytesIO)
    assert result.read() == payload
+
+
+# endregion
+
+# region Tests for get_single_matching_file_name
+
+
def test_get_single_matching_file_name_success(storage, mock_container_client):
    """Test getting single blob when exactly one matches."""
    mock_container_client.list_blob_names.return_value = iter(["data/data_001.csv"])

    assert storage.get_single_matching_file_name("data/data_") == "data/data_001.csv"
+
+
def test_get_single_matching_file_name_no_matches(storage, mock_container_client):
    """Test that FileNotFoundError is raised when no blobs match."""
    mock_container_client.list_blob_names.return_value = iter([])

    with pytest.raises(FileNotFoundError, match="No files found"):
        storage.get_single_matching_file_name("logs/nonexistent_")
+
+
def test_get_single_matching_file_name_multiple_matches(storage, mock_container_client):
    """Test that ValueError is raised when multiple blobs match."""
    mock_container_client.list_blob_names.return_value = iter(
        ["logs/app_001.log", "logs/app_002.log"]
    )

    with pytest.raises(ValueError) as exc_info:
        storage.get_single_matching_file_name("logs/app_")

    message = str(exc_info.value)
    assert "Multiple files found" in message
    assert "expected only 1" in message
+
+
+# endregion
+
+# region Tests for write_file
+
+
def test_write_file_success(storage, mock_container_client):
    """Test writing file content to a blob."""
    blob_name = "logs/app_004.log"
    payload = b"New log entry"
    blob_client = MagicMock()
    mock_container_client.get_blob_client.return_value = blob_client

    storage.write_file(blob_name, payload)

    mock_container_client.get_blob_client.assert_called_once_with(blob_name)
    blob_client.upload_blob.assert_called_once_with(payload, overwrite=True)
+
+
def test_write_file_empty_content(storage, mock_container_client):
    """Test writing empty content."""
    blob_client = MagicMock()
    mock_container_client.get_blob_client.return_value = blob_client

    storage.write_file("logs/empty.log", b"")

    blob_client.upload_blob.assert_called_once_with(b"", overwrite=True)
+
+
+# endregion
+
+# region Tests for list_subfolders
+
+
def test_list_subfolders_success(storage, mock_container_client):
    """Test listing virtual directories (folders)."""

    def make_prefix(name):
        # Only objects passing isinstance(..., BlobPrefix) count as folders.
        prefix = MagicMock(spec=BlobPrefix)
        prefix.name = name
        return prefix

    plain_blob = MagicMock()  # plain blob, must be filtered out
    plain_blob.name = "sample_logs/system.txt"
    mock_container_client.walk_blobs.return_value = [
        make_prefix("sample_logs/archive/"),
        make_prefix("sample_logs/backups/"),
        plain_blob,
    ]

    results = storage.list_subfolders("sample_logs")

    assert len(results) == 2
    assert "archive" in results
    assert "backups" in results
    mock_container_client.walk_blobs.assert_called_once_with(name_starts_with="sample_logs/", delimiter="/")
+
+
def test_list_subfolders_no_subfolders(storage, mock_container_client):
    """Test listing when no folders exist."""
    plain_blob = MagicMock()  # a plain blob, not a BlobPrefix
    plain_blob.name = "data/data_001.csv"
    mock_container_client.walk_blobs.return_value = [plain_blob]

    assert len(storage.list_subfolders("data")) == 0
+
+
def test_list_subfolders_empty_folder(storage, mock_container_client):
    """Test listing subfolders from empty folder."""
    mock_container_client.walk_blobs.return_value = []

    assert len(storage.list_subfolders("empty")) == 0
+
+
def test_list_subfolders_path_formatting(storage, mock_container_client):
    """Test that folder path is properly formatted with trailing slash."""
    folder_prefix = MagicMock(spec=BlobPrefix)
    folder_prefix.name = "logs/archive/old/"
    mock_container_client.walk_blobs.return_value = [folder_prefix]

    storage.list_subfolders("logs/archive")

    # The implementation must append the trailing slash itself.
    mock_container_client.walk_blobs.assert_called_once_with(name_starts_with="logs/archive/", delimiter="/")
+
+
+# endregion
diff --git a/tests/unit/test_data/local_file_storage/sample_data/data_001.csv b/tests/unit/test_data/local_file_storage/sample_data/data_001.csv
new file mode 100644
index 0000000..248f3b0
--- /dev/null
+++ b/tests/unit/test_data/local_file_storage/sample_data/data_001.csv
@@ -0,0 +1,3 @@
+col1,col2,col3
+a,b,c
+1,2,3
diff --git a/tests/unit/test_data/local_file_storage/sample_logs/app_001.txt b/tests/unit/test_data/local_file_storage/sample_logs/app_001.txt
new file mode 100644
index 0000000..dbb8113
--- /dev/null
+++ b/tests/unit/test_data/local_file_storage/sample_logs/app_001.txt
@@ -0,0 +1,2 @@
+Log entry 001
+Started at 2026-03-11 10:00:00
diff --git a/tests/unit/test_data/local_file_storage/sample_logs/app_002.txt b/tests/unit/test_data/local_file_storage/sample_logs/app_002.txt
new file mode 100644
index 0000000..b638267
--- /dev/null
+++ b/tests/unit/test_data/local_file_storage/sample_logs/app_002.txt
@@ -0,0 +1,2 @@
+Log entry 002
+Started at 2026-03-11 11:00:00
diff --git a/tests/unit/test_data/local_file_storage/sample_logs/app_003.txt b/tests/unit/test_data/local_file_storage/sample_logs/app_003.txt
new file mode 100644
index 0000000..180a714
--- /dev/null
+++ b/tests/unit/test_data/local_file_storage/sample_logs/app_003.txt
@@ -0,0 +1,2 @@
+Log entry 003
+Started at 2026-03-11 12:00:00
diff --git a/tests/unit/test_data/local_file_storage/sample_logs/archive/app_old.txt b/tests/unit/test_data/local_file_storage/sample_logs/archive/app_old.txt
new file mode 100644
index 0000000..7c53504
--- /dev/null
+++ b/tests/unit/test_data/local_file_storage/sample_logs/archive/app_old.txt
@@ -0,0 +1,2 @@
+Old log entry
+Started at 2026-02-01 10:00:00
diff --git a/tests/unit/test_data/local_file_storage/sample_logs/system.txt b/tests/unit/test_data/local_file_storage/sample_logs/system.txt
new file mode 100644
index 0000000..064654c
--- /dev/null
+++ b/tests/unit/test_data/local_file_storage/sample_logs/system.txt
@@ -0,0 +1,2 @@
+System log
+Event type: SYSTEM_START
diff --git a/tests/unit/test_local_file_storage.py b/tests/unit/test_local_file_storage.py
new file mode 100644
index 0000000..bfeceff
--- /dev/null
+++ b/tests/unit/test_local_file_storage.py
@@ -0,0 +1,313 @@
+import os
+import pytest
+from io import BytesIO
+from corvus_python.storage.local_file_storage import LocalFileStorage
+
+
+@pytest.fixture
+def storage():
+ """Fixture providing LocalFileStorage instance with test data directory."""
+ test_data_path = os.path.join(os.path.dirname(__file__), "test_data", "local_file_storage")
+ return LocalFileStorage(test_data_path)
+
+
+# region Tests for get_file_bytes
+
+
+def test_get_file_bytes_success(storage):
+ """Test reading file bytes from a valid file."""
+ result = storage.get_file_bytes("sample_logs/app_001.txt")
+
+ assert isinstance(result, BytesIO)
+
+ content = result.read()
+ assert b"Log entry 001" in content
+ assert b"2026-03-11 10:00:00" in content
+
+
+def test_get_file_bytes_nested_file(storage):
+ """Test reading file from nested directory."""
+ result = storage.get_file_bytes("sample_logs/archive/app_old.txt")
+
+ assert isinstance(result, BytesIO)
+ content = result.read()
+ assert b"Old log entry" in content
+
+
+def test_get_file_bytes_file_not_found(storage):
+ """Test that FileNotFoundError is raised for non-existent file."""
+ with pytest.raises(FileNotFoundError):
+ storage.get_file_bytes("sample_logs/nonexistent.txt")
+
+
+# endregion
+
+# region Tests for get_matching_file_names
+
+
+def test_get_matching_file_names_exact_prefix(storage):
+ """Test finding files matching exact prefix."""
+ results = storage.get_matching_file_names("sample_logs/app_")
+
+ assert len(results) == 3
+ assert "sample_logs/app_001.txt" in results
+ assert "sample_logs/app_002.txt" in results
+ assert "sample_logs/app_003.txt" in results
+ assert "sample_logs/system.txt" not in results
+
+
+def test_get_matching_file_names_folder_scoped(storage):
+ """Test that matching is scoped to the folder, not cross-folder."""
+ results = storage.get_matching_file_names("sample_logs/app_")
+
+ # Should not include files from archive subfolder
+ assert "sample_logs/archive/app_old.txt" not in results
+ assert len(results) == 3
+
+
+def test_get_matching_file_names_single_match(storage):
+ """Test matching with only one result."""
+ results = storage.get_matching_file_names("sample_logs/system")
+
+ assert len(results) == 1
+ assert "sample_logs/system.txt" in results
+
+
+def test_get_matching_file_names_no_matches(storage):
+ """Test that empty list is returned when no files match."""
+ results = storage.get_matching_file_names("sample_logs/nonexistent_")
+
+ assert len(results) == 0
+ assert isinstance(results, list)
+
+
+def test_get_matching_file_names_csv_files(storage):
+ """Test matching CSV files."""
+ results = storage.get_matching_file_names("sample_data/data_")
+
+ assert len(results) == 1
+ assert "sample_data/data_001.csv" in results
+
+
+# endregion
+
+# region Tests for get_latest_matching_file_name
+
+
+def test_get_latest_matching_file_name_success(storage):
+ """Test getting the latest matching file."""
+ result = storage.get_latest_matching_file_name("sample_logs/app_")
+
+ # Should return the lexicographically last match
+ assert result == "sample_logs/app_003.txt"
+
+
+def test_get_latest_matching_file_name_single_file(storage):
+ """Test getting latest when only one file matches."""
+ result = storage.get_latest_matching_file_name("sample_data/data_")
+
+ assert result == "sample_data/data_001.csv"
+
+
+def test_get_latest_matching_file_name_no_matches(storage):
+ """Test that FileNotFoundError is raised when no files match."""
+ with pytest.raises(FileNotFoundError) as exc_info:
+ storage.get_latest_matching_file_name("sample_logs/nonexistent_")
+
+ assert "No files found" in str(exc_info.value)
+
+
+# endregion
+
+# region Tests for get_latest_matching_file_bytes
+
+
+def test_get_latest_matching_file_bytes_success(storage):
+ """Test getting bytes of the latest matching file."""
+ result = storage.get_latest_matching_file_bytes("sample_logs/app_")
+
+ assert isinstance(result, BytesIO)
+ content = result.read()
+ assert b"Log entry 003" in content
+ assert b"2026-03-11 12:00:00" in content
+
+
+def test_get_latest_matching_file_bytes_stream_position(storage):
+ """Test that returned BytesIO is positioned at the start."""
+ result = storage.get_latest_matching_file_bytes("sample_logs/app_")
+
+ assert result.tell() == 0
+ first_char = result.read(1)
+ assert first_char == b"L"
+
+
+# endregion
+
+# region Tests for get_single_matching_file_name
+
+
+def test_get_single_matching_file_name_success(storage):
+ """Test getting single file when exactly one matches."""
+ result = storage.get_single_matching_file_name("sample_data/data_")
+
+ assert result == "sample_data/data_001.csv"
+
+
+def test_get_single_matching_file_name_no_matches(storage):
+ """Test that FileNotFoundError is raised when no files match."""
+ with pytest.raises(FileNotFoundError) as exc_info:
+ storage.get_single_matching_file_name("sample_logs/nonexistent_")
+
+ assert "No files found" in str(exc_info.value)
+
+
+def test_get_single_matching_file_name_multiple_matches(storage):
+ """Test that ValueError is raised when multiple files match."""
+ with pytest.raises(ValueError) as exc_info:
+ storage.get_single_matching_file_name("sample_logs/app_")
+
+ assert "Multiple files found" in str(exc_info.value)
+ assert "expected only 1" in str(exc_info.value)
+
+
+# endregion
+
+# region Tests for get_single_matching_file_bytes
+
+
+def test_get_single_matching_file_bytes_success(storage):
+ """Test getting bytes of single matching file."""
+ result = storage.get_single_matching_file_bytes("sample_data/data_")
+
+ assert isinstance(result, BytesIO)
+ content = result.read()
+ assert b"col1,col2,col3" in content
+
+
+def test_get_single_matching_file_bytes_multiple_matches(storage):
+ """Test that ValueError is raised when multiple files match."""
+ with pytest.raises(ValueError) as exc_info:
+ storage.get_single_matching_file_bytes("sample_logs/app_")
+
+ assert "Multiple files found" in str(exc_info.value)
+
+
+# endregion
+
+# region Tests for write_file
+
+
+def test_write_file_new_file(storage):
+ """Test writing a new file."""
+ test_path = "sample_logs/test_write_new.txt"
+ test_content = b"Test file content\nLine 2"
+
+ storage.write_file(test_path, test_content)
+
+ # Verify file was written
+ full_path = os.path.join(storage._base_path, test_path)
+ assert os.path.exists(full_path)
+
+ with open(full_path, "rb") as f:
+ written_content = f.read()
+
+ assert written_content == test_content
+
+ # Cleanup
+ os.remove(full_path)
+
+
+def test_write_file_overwrite_existing(storage):
+ """Test overwriting an existing file."""
+ test_path = "sample_logs/app_001.txt"
+ new_content = b"Overwritten content"
+
+ # Backup original
+ original_bytes = storage.get_file_bytes(test_path)
+ original_content = original_bytes.read()
+
+ storage.write_file(test_path, new_content)
+
+ # Verify overwrite
+ result = storage.get_file_bytes(test_path)
+ assert result.read() == new_content
+
+ # Restore original
+ storage.write_file(test_path, original_content)
+
+
+def test_write_file_creates_nested_directories(storage):
+ """Test that write_file creates necessary directories."""
+ test_path = "sample_logs/new_subdir/test_file.txt"
+ test_content = b"Content in nested directory"
+
+ storage.write_file(test_path, test_content)
+
+ # Verify file was written
+ full_path = os.path.join(storage._base_path, test_path)
+ assert os.path.exists(full_path)
+
+ with open(full_path, "rb") as f:
+ written_content = f.read()
+
+ assert written_content == test_content
+
+ # Cleanup
+ os.remove(full_path)
+ os.rmdir(os.path.dirname(full_path))
+
+
+def test_write_file_empty_content(storage):
+ """Test writing an empty file."""
+ test_path = "sample_logs/test_empty.txt"
+ test_content = b""
+
+ storage.write_file(test_path, test_content)
+
+ # Verify file was written
+ full_path = os.path.join(storage._base_path, test_path)
+ assert os.path.exists(full_path)
+ assert os.path.getsize(full_path) == 0
+
+ # Cleanup
+ os.remove(full_path)
+
+
+# endregion
+
+# region Tests for list_subfolders
+
+
+def test_list_subfolders_success(storage):
+ """Test listing subfolders."""
+ results = storage.list_subfolders("sample_logs")
+
+ assert isinstance(results, list)
+ assert "archive" in results
+
+
+def test_list_subfolders_no_subfolders(storage):
+ """Test listing subfolders when none exist."""
+ results = storage.list_subfolders("sample_data")
+
+ assert isinstance(results, list)
+ assert len(results) == 0
+
+
+def test_list_subfolders_nonexistent_folder(storage):
+ """Test listing subfolders for non-existent folder."""
+ results = storage.list_subfolders("nonexistent")
+
+ assert isinstance(results, list)
+ assert len(results) == 0
+
+
+def test_list_subfolders_with_leading_slash(storage):
+ """Test that leading slash in folder_path is handled."""
+ results = storage.list_subfolders("/sample_logs")
+
+ assert isinstance(results, list)
+ assert "archive" in results
+
+
+# endregion
diff --git a/tests/unit/test_polars_testing_utils.py b/tests/unit/test_polars_testing_utils.py
new file mode 100644
index 0000000..c87ab6a
--- /dev/null
+++ b/tests/unit/test_polars_testing_utils.py
@@ -0,0 +1,119 @@
+import pytest
+import polars as pl
+from behave.model import Table
+from corvus_python.testing.polars_testing_utils import (
+ behave_table_to_polars_dataframe,
+ behave_table_to_polars_dataframe_with_explicit_schema,
+ behave_table_to_polars_dataframe_with_inferred_schema,
+ behave_table_to_dictionary_by_row,
+ compare_polars_dataframes,
+ _string_to_polars_type,
+)
+
+
+class FakeRow:
+ def __init__(self, cells: list[str]):
+ self.cells = cells
+
+
+class FakeTable(Table):
+ def __init__(self, headings: list[str], rows: list[list[str]]):
+ self.headings = headings
+ self._rows = [FakeRow(row) for row in rows]
+
+ def __iter__(self):
+ return iter(self._rows)
+
+
+def test_behave_table_to_polars_dataframe_inferred():
+ table = FakeTable(["a", "b"], [["1", "2"], ["3", "4"]])
+ df = behave_table_to_polars_dataframe(table)
+ expected = pl.DataFrame({"a": ["1", "3"], "b": ["2", "4"]})
+ compare_polars_dataframes(expected, df)
+
+
+def test_behave_table_to_polars_dataframe_with_explicit_schema():
+ table = FakeTable(["a:integer", "b:string"], [["1", "foo"], ["2", "bar"]])
+ df = behave_table_to_polars_dataframe_with_explicit_schema(table)
+ expected = pl.DataFrame({"a": [1, 2], "b": ["foo", "bar"]})
+ compare_polars_dataframes(expected, df)
+
+
+def test_behave_table_to_polars_dataframe_with_inferred_schema():
+ table = FakeTable(["x", "y"], [["", "42"], ["hello", ""]])
+ df = behave_table_to_polars_dataframe_with_inferred_schema(table)
+ expected = pl.DataFrame({"x": [None, "hello"], "y": ["42", None]})
+ compare_polars_dataframes(expected, df)
+
+
+def test_behave_table_to_dictionary_by_row():
+ table = FakeTable(["key", "val"], [["foo", "bar"], ["baz", "qux"]])
+ d = behave_table_to_dictionary_by_row(table)
+ assert d == {"foo": "bar", "baz": "qux"}
+
+
+def test_compare_polars_dataframes_row_order():
+ expected = pl.DataFrame({"a": [1, 2], "b": ["x", "y"]})
+ actual = pl.DataFrame({"b": ["x", "y"], "a": [1, 2]})
+ compare_polars_dataframes(expected, actual, check_like=True, check_row_order=True)
+
+
+def test_compare_polars_dataframes_missing_columns():
+ expected = pl.DataFrame({"a": [1, 2]})
+ actual = pl.DataFrame({"a": [1, 2], "b": ["x", "y"]})
+ compare_polars_dataframes(expected, actual, check_like=True, check_row_order=True, ignore_missing_columns=True)
+
+
+def test__string_to_polars_type():
+ assert _string_to_polars_type("integer") == pl.Int64
+ assert _string_to_polars_type("date") == pl.Date
+ assert _string_to_polars_type("boolean") == pl.Boolean
+ assert _string_to_polars_type("string") == pl.Utf8
+ assert _string_to_polars_type("object") == pl.Object
+ assert _string_to_polars_type("decimal") == pl.Decimal
+ assert _string_to_polars_type("timestamp") == pl.Datetime(time_zone="UTC")
+ assert _string_to_polars_type("unknown") == pl.Utf8
+
+
+def test_explicit_schema_error():
+ table = FakeTable(["a", "b:string"], [["1", "foo"]])
+ with pytest.raises(ValueError):
+ behave_table_to_polars_dataframe_with_explicit_schema(table)
+
+
+def test_explicit_schema_struct():
+ table = FakeTable(
+        ["data:struct<name:string,age:integer>"],
+ [['{"name": "Alice", "age": 30}'], ['{"name": "Bob", "age": 25}']],
+ )
+ df = behave_table_to_polars_dataframe_with_explicit_schema(table)
+ assert df.schema["data"] == pl.Struct({"name": pl.Utf8, "age": pl.Int64})
+ assert df["data"][0] == {"name": "Alice", "age": 30}
+ assert df["data"][1] == {"name": "Bob", "age": 25}
+
+
+def test_explicit_schema_struct_empty_rows():
+    table = FakeTable(["data:struct<name:string,age:integer>"], [])
+ df = behave_table_to_polars_dataframe_with_explicit_schema(table)
+ assert df.schema["data"] == pl.Struct({"name": pl.Utf8, "age": pl.Int64})
+ assert len(df) == 0
+
+
+def test_explicit_schema_array_of_struct():
+ table = FakeTable(
+        ["items:array<struct<id:integer,label:string>>"],
+ [['[{"id": 1, "label": "a"}, {"id": 2, "label": "b"}]'], ['[{"id": 3, "label": "c"}]']],
+ )
+ df = behave_table_to_polars_dataframe_with_explicit_schema(table)
+ assert df.schema["items"] == pl.List(pl.Struct({"id": pl.Int64, "label": pl.Utf8}))
+ assert df["items"].to_list() == [
+ [{"id": 1, "label": "a"}, {"id": 2, "label": "b"}],
+ [{"id": 3, "label": "c"}],
+ ]
+
+
+def test_explicit_schema_array_of_struct_empty_rows():
+    table = FakeTable(["items:array<struct<id:integer,label:string>>"], [])
+ df = behave_table_to_polars_dataframe_with_explicit_schema(table)
+ assert df.schema["items"] == pl.List(pl.Struct({"id": pl.Int64, "label": pl.Utf8}))
+ assert len(df) == 0
diff --git a/tests/unit/test_sharepoint_file_storage.py b/tests/unit/test_sharepoint_file_storage.py
new file mode 100644
index 0000000..82d91ac
--- /dev/null
+++ b/tests/unit/test_sharepoint_file_storage.py
@@ -0,0 +1,364 @@
+import pytest
+import requests
+from io import BytesIO
+from unittest.mock import patch, Mock
+from corvus_python.storage.sharepoint_file_storage import SharePointFileStorage
+
+
+@pytest.fixture
+def mock_sharepoint_utilities():
+ """Fixture for patching SharePointUtilities."""
+ with patch("corvus_python.storage.sharepoint_file_storage.SharePointUtilities") as mock_util:
+ yield mock_util
+
+
+@pytest.fixture
+def storage(mock_sharepoint_utilities):
+ """Fixture providing SharePointFileStorage instance with mocked utilities."""
+ mock_sharepoint_utilities.get_drive_id.return_value = "fake_drive_id"
+
+ return SharePointFileStorage(
+ sharepoint_tenant_fqdn="tenant.sharepoint.com",
+ sharepoint_site_name="TestSite",
+ library_name="Documents",
+ auth_token="fake_token",
+ )
+
+
+# region Tests for get_file_bytes
+
+
+def test_get_file_bytes_success(storage, mock_sharepoint_utilities):
+ """Test reading file bytes from SharePoint."""
+ filename = "logs/app_001.log"
+ test_content = b"Log entry 001"
+ download_url = "https://example.sharepoint.com/download/file"
+
+ mock_sharepoint_utilities.get_file_download_url.return_value = download_url
+
+ with patch("requests.get") as mock_get:
+ mock_response = Mock()
+ mock_response.content = test_content
+ mock_response.raise_for_status = Mock()
+ mock_get.return_value = mock_response
+
+ result = storage.get_file_bytes(filename)
+
+ assert isinstance(result, BytesIO)
+ assert result.tell() == 0 # Stream should be at position 0
+ content = result.read()
+ assert content == test_content
+ mock_get.assert_called_once_with(download_url)
+
+
+def test_get_file_bytes_network_error(storage, mock_sharepoint_utilities):
+ """Test handling of HTTP errors."""
+ filename = "logs/app_001.log"
+ download_url = "https://example.sharepoint.com/download/file"
+
+ mock_sharepoint_utilities.get_file_download_url.return_value = download_url
+
+ with patch("requests.get") as mock_get:
+ mock_get.side_effect = requests.exceptions.HTTPError("404 Not Found")
+
+ with pytest.raises(requests.exceptions.HTTPError):
+ storage.get_file_bytes(filename)
+
+
+# endregion
+
+# region Tests for get_matching_file_names
+
+
+def test_get_matching_file_names_multiple_matches(storage, mock_sharepoint_utilities):
+ """Test finding multiple files matching a prefix."""
+ prefix = "sample_logs/app_"
+ mock_files = [
+ {"name": "app_001.txt", "file": {}},
+ {"name": "app_002.txt", "file": {}},
+ {"name": "app_003.txt", "file": {}},
+ ]
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = mock_files
+
+ results = storage.get_matching_file_names(prefix)
+
+ assert len(results) == 3
+ assert "sample_logs/app_001.txt" in results
+ assert "sample_logs/app_002.txt" in results
+ assert "sample_logs/app_003.txt" in results
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.assert_called_once_with(
+ drive_id="fake_drive_id", folder_name="sample_logs", token="fake_token"
+ )
+
+
+def test_get_matching_file_names_folder_scoped(storage, mock_sharepoint_utilities):
+ """Test that matching is scoped to the folder."""
+ prefix = "logs/app_"
+ mock_files = [
+ {"name": "app_001.log", "file": {}},
+ {"name": "app_002.log", "file": {}},
+ {"name": "system.log", "file": {}}, # Should be filtered
+ ]
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = mock_files
+
+ results = storage.get_matching_file_names(prefix)
+
+ assert len(results) == 2
+ assert "logs/app_001.log" in results
+ assert "logs/app_002.log" in results
+
+
+def test_get_matching_file_names_no_matches(storage, mock_sharepoint_utilities):
+ """Test that empty list is returned when no files match."""
+ prefix = "logs/nonexistent_"
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = []
+
+ results = storage.get_matching_file_names(prefix)
+
+ assert len(results) == 0
+
+
+def test_get_matching_file_names_filters_non_files(storage, mock_sharepoint_utilities):
+ """Test that only files (not folders) are returned."""
+ prefix = "logs/app_"
+ mock_files = [
+ {"name": "app_001.log", "file": {}},
+ {"name": "subfolder", "folder": {}}, # Should be filtered (has folder, no file)
+ {"name": "app_002.log", "file": {}},
+ ]
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = mock_files
+
+ results = storage.get_matching_file_names(prefix)
+
+ # The implementation filters for files with "file" key in the dict
+ assert len(results) == 2
+
+
+# endregion
+
+# region Tests for get_latest_matching_file_name
+
+
+def test_get_latest_matching_file_name_success(storage, mock_sharepoint_utilities):
+ """Test getting the latest matching file."""
+ prefix = "logs/app_"
+ mock_files = [
+ {"name": "app_001.log", "file": {}},
+ {"name": "app_003.log", "file": {}},
+ {"name": "app_002.log", "file": {}},
+ ]
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = mock_files
+
+ result = storage.get_latest_matching_file_name(prefix)
+
+ assert result == "logs/app_003.log"
+
+
+def test_get_latest_matching_file_name_no_matches(storage, mock_sharepoint_utilities):
+ """Test that FileNotFoundError is raised when no files match."""
+ prefix = "logs/nonexistent_"
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = []
+
+ with pytest.raises(FileNotFoundError) as exc_info:
+ storage.get_latest_matching_file_name(prefix)
+
+ assert "No files found" in str(exc_info.value)
+
+
+# endregion
+
+# region Tests for get_latest_matching_file_bytes
+
+
+def test_get_latest_matching_file_bytes_success(storage, mock_sharepoint_utilities):
+ """Test getting bytes of the latest matching file."""
+ prefix = "logs/app_"
+ mock_files = [
+ {"name": "app_001.log", "file": {}},
+ {"name": "app_003.log", "file": {}},
+ ]
+ test_content = b"Latest log entry"
+ download_url = "https://example.sharepoint.com/download/latest"
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = mock_files
+ mock_sharepoint_utilities.get_file_download_url.return_value = download_url
+
+ with patch("requests.get") as mock_get:
+ mock_response = Mock()
+ mock_response.content = test_content
+ mock_response.raise_for_status = Mock()
+ mock_get.return_value = mock_response
+
+ result = storage.get_latest_matching_file_bytes(prefix)
+
+ assert isinstance(result, BytesIO)
+ content = result.read()
+ assert content == test_content
+
+
+# endregion
+
+# region Tests for get_single_matching_file_name
+
+
+def test_get_single_matching_file_name_success(storage, mock_sharepoint_utilities):
+ """Test getting single file when exactly one matches."""
+ prefix = "data/data_"
+ mock_files = [{"name": "data_001.csv", "file": {}}]
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = mock_files
+
+ result = storage.get_single_matching_file_name(prefix)
+
+ assert result == "data/data_001.csv"
+
+
+def test_get_single_matching_file_name_no_matches(storage, mock_sharepoint_utilities):
+ """Test that FileNotFoundError is raised when no files match."""
+ prefix = "logs/nonexistent_"
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = []
+
+ with pytest.raises(FileNotFoundError) as exc_info:
+ storage.get_single_matching_file_name(prefix)
+
+ assert "No files found" in str(exc_info.value)
+
+
+def test_get_single_matching_file_name_multiple_matches(storage, mock_sharepoint_utilities):
+ """Test that ValueError is raised when multiple files match."""
+ prefix = "logs/app_"
+ mock_files = [
+ {"name": "app_001.log", "file": {}},
+ {"name": "app_002.log", "file": {}},
+ ]
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = mock_files
+
+ with pytest.raises(ValueError) as exc_info:
+ storage.get_single_matching_file_name(prefix)
+
+ assert "Multiple files found" in str(exc_info.value)
+ assert "expected only 1" in str(exc_info.value)
+
+
+# endregion
+
+# region Tests for get_single_matching_file_bytes
+
+
+def test_get_single_matching_file_bytes_success(storage, mock_sharepoint_utilities):
+ """Test getting bytes of single matching file."""
+ prefix = "data/data_"
+ mock_files = [{"name": "data_001.csv", "file": {}}]
+ test_content = b"col1,col2,col3"
+ download_url = "https://example.sharepoint.com/download/data"
+
+ mock_sharepoint_utilities.get_download_urls_for_files_in_folder.return_value = mock_files
+ mock_sharepoint_utilities.get_file_download_url.return_value = download_url
+
+ with patch("requests.get") as mock_get:
+ mock_response = Mock()
+ mock_response.content = test_content
+ mock_response.raise_for_status = Mock()
+ mock_get.return_value = mock_response
+
+ result = storage.get_single_matching_file_bytes(prefix)
+
+ assert isinstance(result, BytesIO)
+ content = result.read()
+ assert content == test_content
+
+
+# endregion
+
+# region Tests for write_file
+
+
+def test_write_file_success(storage, mock_sharepoint_utilities):
+ """Test writing file to SharePoint."""
+ filename = "logs/app_004.log"
+ file_content = b"New log entry"
+
+ mock_sharepoint_utilities.save_file.return_value = {"id": "file123", "webUrl": "https://..."}
+
+ storage.write_file(filename, file_content)
+
+ mock_sharepoint_utilities.save_file.assert_called_once_with(
+ "fake_drive_id", filename, "fake_token", bytearray(file_content)
+ )
+
+
+def test_write_file_empty_content(storage, mock_sharepoint_utilities):
+ """Test writing empty content."""
+ filename = "logs/empty.log"
+ file_content = b""
+
+ mock_sharepoint_utilities.save_file.return_value = {"id": "file_empty", "webUrl": "https://..."}
+
+ storage.write_file(filename, file_content)
+
+ mock_sharepoint_utilities.save_file.assert_called_once()
+
+
+# endregion
+
+# region Tests for list_subfolders
+
+
+def test_list_subfolders_success(storage, mock_sharepoint_utilities):
+ """Test listing subfolders."""
+ folder_path = "sample_logs"
+ mock_items = [
+ {"webUrl": "https://example.sharepoint.com/folders/archive", "folder": {}, "name": "archive"},
+ {"webUrl": "https://example.sharepoint.com/folders/backups", "folder": {}, "name": "backups"},
+ ]
+
+ mock_sharepoint_utilities.get_items_in_folder.return_value = mock_items
+
+ results = storage.list_subfolders(folder_path)
+
+ assert len(results) == 2
+ assert "archive" in results
+ assert "backups" in results
+ mock_sharepoint_utilities.get_items_in_folder.assert_called_once_with(
+ drive_id="fake_drive_id", folder_path=folder_path, token="fake_token"
+ )
+
+
+def test_list_subfolders_nested_path(storage, mock_sharepoint_utilities):
+ """Test listing subfolders in a nested path."""
+ folder_path = "logs/archive"
+ mock_items = [
+ {"webUrl": "https://example.sharepoint.com/folders/2024", "folder": {}, "name": "2024"},
+ {"webUrl": "https://example.sharepoint.com/folders/2025", "folder": {}, "name": "2025"},
+ ]
+
+ mock_sharepoint_utilities.get_items_in_folder.return_value = mock_items
+
+ results = storage.list_subfolders(folder_path)
+
+ assert len(results) == 2
+ mock_sharepoint_utilities.get_items_in_folder.assert_called_once_with(
+ drive_id="fake_drive_id", folder_path=folder_path, token="fake_token"
+ )
+
+
+def test_list_subfolders_no_subfolders(storage, mock_sharepoint_utilities):
+ """Test listing when no subfolders exist."""
+ folder_path = "data"
+
+ mock_sharepoint_utilities.get_items_in_folder.return_value = []
+
+ results = storage.list_subfolders(folder_path)
+
+ assert len(results) == 0
+
+
+# endregion
diff --git a/tests/unit/test_sharepoint_utils.py b/tests/unit/test_sharepoint_utils.py
index 81ae677..14b4ae8 100644
--- a/tests/unit/test_sharepoint_utils.py
+++ b/tests/unit/test_sharepoint_utils.py
@@ -448,3 +448,91 @@ def test_get_drive_id_failure():
# endregion
+
+# region Tests for assign_item_permissions
+
+
+def test_assign_item_permissions_success():
+ sharepoint_tenant_fqdn = "example.sharepoint.com"
+ sharepoint_site_name = "site_name"
+ library_name = "library_name"
+ item_path = "folder/file.txt"
+ token = "fake_token"
+ recipients = [{"email": "user@example.com"}]
+ write_permission = True
+ drive_id = "fake_drive_id"
+ mock_response_json = {"status": "success"}
+ headers = {
+ "Accept": "application/json",
+ "Content-Type": "application/json; charset=utf-8",
+ "Authorization": f"Bearer {token}",
+ }
+
+ with patch.object(SharePointUtilities, "get_drive_id", return_value=drive_id):
+ with patch("requests.post") as mock_post:
+ mock_post.return_value = Mock(status_code=200, json=lambda: mock_response_json)
+ result = SharePointUtilities.assign_item_permissions(
+ sharepoint_tenant_fqdn,
+ sharepoint_site_name,
+ library_name,
+ item_path,
+ token,
+ recipients,
+ write_permission,
+ )
+ mock_post.assert_called_once_with(
+ f"https://graph.microsoft.com/v1.0/drives/{drive_id}/root:/{item_path}:/invite",
+ headers=headers,
+ json={
+ "requireSignIn": True,
+ "sendInvitation": False,
+ "roles": ["write"],
+ "recipients": recipients,
+ },
+ )
+ assert result == mock_response_json
+
+
+def test_assign_item_permissions_failure():
+ sharepoint_tenant_fqdn = "example.sharepoint.com"
+ sharepoint_site_name = "site_name"
+ library_name = "library_name"
+ item_path = "folder/file.txt"
+ token = "fake_token"
+ recipients = [{"email": "user@example.com"}]
+ write_permission = False
+ drive_id = "fake_drive_id"
+ headers = {
+ "Accept": "application/json",
+ "Content-Type": "application/json; charset=utf-8",
+ "Authorization": f"Bearer {token}",
+ }
+
+ with patch.object(SharePointUtilities, "get_drive_id", return_value=drive_id):
+ with patch("requests.post") as mock_post:
+ mock_post.return_value = Mock(
+ status_code=500, raise_for_status=lambda: (_ for _ in ()).throw(Exception("HTTP error"))
+ )
+ with pytest.raises(Exception):
+ SharePointUtilities.assign_item_permissions(
+ sharepoint_tenant_fqdn,
+ sharepoint_site_name,
+ library_name,
+ item_path,
+ token,
+ recipients,
+ write_permission,
+ )
+ mock_post.assert_called_once_with(
+ f"https://graph.microsoft.com/v1.0/drives/{drive_id}/root:/{item_path}:/invite",
+ headers=headers,
+ json={
+ "requireSignIn": True,
+ "sendInvitation": False,
+ "roles": ["read"],
+ "recipients": recipients,
+ },
+ )
+
+
+# endregion