27 changes: 27 additions & 0 deletions examples/oauth_schema_registry.py
@@ -71,6 +71,33 @@ def custom_oauth_function(config):
custom_sr_client = SchemaRegistryClient(custom_sr_config)
print(custom_sr_client.get_subjects())

# Example: Using union-of-pools with comma-separated pool IDs
union_of_pools_config = {
'url': 'https://psrc-123456.us-east-1.aws.confluent.cloud',
'bearer.auth.credentials.source': 'STATIC_TOKEN',
'bearer.auth.token': 'multi-pool-token',
'bearer.auth.logical.cluster': 'lsrc-12345',
'bearer.auth.identity.pool.id': 'pool-abc,pool-def,pool-ghi',
}

union_sr_client = SchemaRegistryClient(union_of_pools_config)
print(union_sr_client.get_subjects())

# Example: Omitting identity pool for auto pool mapping
auto_pool_config = {
'url': 'https://psrc-123456.us-east-1.aws.confluent.cloud',
'bearer.auth.credentials.source': 'OAUTHBEARER',
'bearer.auth.client.id': 'client-id',
'bearer.auth.client.secret': 'client-secret',
'bearer.auth.scope': 'schema_registry',
'bearer.auth.issuer.endpoint.url': 'https://yourauthprovider.com/v1/token',
'bearer.auth.logical.cluster': 'lsrc-12345',
# bearer.auth.identity.pool.id is omitted - SR will use auto pool mapping
}

auto_pool_sr_client = SchemaRegistryClient(auto_pool_config)
print(auto_pool_sr_client.get_subjects())


if __name__ == '__main__':
main()
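
The normalize_identity_pool helper introduced in this PR also accepts a list of pool IDs and joins them with commas, so the union-of-pools configuration could equivalently use a list value. A minimal sketch (not part of the example file above; it assumes no other config validation rejects non-string values, and the variable names are illustrative):

from confluent_kafka.schema_registry import SchemaRegistryClient

list_pool_config = {
    'url': 'https://psrc-123456.us-east-1.aws.confluent.cloud',
    'bearer.auth.credentials.source': 'STATIC_TOKEN',
    'bearer.auth.token': 'multi-pool-token',
    'bearer.auth.logical.cluster': 'lsrc-12345',
    # Normalized internally to the string 'pool-abc,pool-def'
    'bearer.auth.identity.pool.id': ['pool-abc', 'pool-def'],
}
list_pool_sr_client = SchemaRegistryClient(list_pool_config)
print(list_pool_sr_client.get_subjects())
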
@@ -44,6 +44,7 @@
full_jitter,
is_retriable,
is_success,
normalize_identity_pool,
)
from confluent_kafka.schema_registry.error import OAuthTokenError, SchemaRegistryError

@@ -97,10 +98,10 @@ def __init__(
scope: str,
token_endpoint: str,
logical_cluster: str,
identity_pool: str,
max_retries: int,
retries_wait_ms: int,
retries_max_wait_ms: int,
identity_pool: Optional[str] = None,
):
self.token = None
self.logical_cluster = logical_cluster
@@ -113,11 +114,13 @@ def __init__(
self.token_expiry_threshold = 0.8

async def get_bearer_fields(self) -> dict:
return {
fields = {
'bearer.auth.token': await self.get_access_token(),
'bearer.auth.logical.cluster': self.logical_cluster,
'bearer.auth.identity.pool.id': self.identity_pool,
}
if self.identity_pool is not None:
fields['bearer.auth.identity.pool.id'] = self.identity_pool
return fields

def token_expired(self) -> bool:
if self.token is None:
@@ -283,20 +286,14 @@ def __init__(self, conf: dict):
self.auth = None

if self.bearer_auth_credentials_source in {'OAUTHBEARER', 'STATIC_TOKEN'}:
headers = ['bearer.auth.logical.cluster', 'bearer.auth.identity.pool.id']
missing_headers = [header for header in headers if header not in conf_copy]
if missing_headers:
raise ValueError(
"Missing required bearer configuration properties: {}".format(", ".join(missing_headers))
)
if 'bearer.auth.logical.cluster' not in conf_copy:
raise ValueError("Missing required bearer configuration property: bearer.auth.logical.cluster")

logical_cluster = conf_copy.pop('bearer.auth.logical.cluster')
if not isinstance(logical_cluster, str):
raise TypeError("logical cluster must be a str, not " + str(type(logical_cluster)))

Copilot AI Feb 2, 2026

The comment on lines 297-299 describes identity pool as supporting comma-separated values for union-of-pools, but the type check only validates it as a string. Consider adding a comment explaining that comma-separated values are passed as a single string to clarify the expected format.

Suggested change
# Note: identity_pool is always provided and validated as a single
# string. For union-of-pools use cases, multiple identity pool
# IDs should be encoded as a comma-separated list within this
# string. No additional parsing or validation of the individual
# comma-separated values is performed here.

identity_pool = conf_copy.pop('bearer.auth.identity.pool.id')
if not isinstance(identity_pool, str):
raise TypeError("identity pool id must be a str, not " + str(type(identity_pool)))
identity_pool = normalize_identity_pool(conf_copy.pop('bearer.auth.identity.pool.id', None))

if self.bearer_auth_credentials_source == 'OAUTHBEARER':
properties_list = [
@@ -335,10 +332,10 @@ def __init__(self, conf: dict):
self.scope,
self.token_endpoint,
logical_cluster,
identity_pool,
self.max_retries,
self.retries_wait_ms,
self.retries_max_wait_ms,
identity_pool,
)
else: # STATIC_TOKEN
if 'bearer.auth.token' not in conf_copy:
@@ -412,7 +409,8 @@ async def handle_bearer_auth(self, headers: dict) -> None:
if self.bearer_field_provider is None:
raise ValueError("Bearer field provider is not set")
bearer_fields = await self.bearer_field_provider.get_bearer_fields()
required_fields = ['bearer.auth.token', 'bearer.auth.identity.pool.id', 'bearer.auth.logical.cluster']
# Note: bearer.auth.identity.pool.id is optional; only token and logical.cluster are required
required_fields = ['bearer.auth.token', 'bearer.auth.logical.cluster']

missing_fields = []
for field in required_fields:
@@ -427,9 +425,11 @@
)

headers["Authorization"] = "Bearer {}".format(bearer_fields['bearer.auth.token'])
headers['Confluent-Identity-Pool-Id'] = bearer_fields['bearer.auth.identity.pool.id']
headers['target-sr-cluster'] = bearer_fields['bearer.auth.logical.cluster']

if 'bearer.auth.identity.pool.id' in bearer_fields:
headers['Confluent-Identity-Pool-Id'] = bearer_fields['bearer.auth.identity.pool.id']

async def get(self, url: str, query: Optional[dict] = None) -> Any:
return await self.send_request(url, method='GET', query=query)
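
For illustration, the headers built by handle_bearer_auth above now differ depending on whether an identity pool was configured. A minimal sketch with made-up token and cluster values:

# Sketch of the resulting request headers (illustrative values only).
headers_with_pool = {
    'Authorization': 'Bearer multi-pool-token',
    'target-sr-cluster': 'lsrc-12345',
    'Confluent-Identity-Pool-Id': 'pool-abc,pool-def,pool-ghi',
}
headers_auto_pool_mapping = {
    'Authorization': 'Bearer some-token',
    'target-sr-cluster': 'lsrc-12345',
    # No Confluent-Identity-Pool-Id header; Schema Registry applies auto pool mapping.
}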

@@ -44,6 +44,7 @@
full_jitter,
is_retriable,
is_success,
normalize_identity_pool,
)
from confluent_kafka.schema_registry.error import OAuthTokenError, SchemaRegistryError

@@ -97,10 +98,10 @@ def __init__(
scope: str,
token_endpoint: str,
logical_cluster: str,
identity_pool: str,
max_retries: int,
retries_wait_ms: int,
retries_max_wait_ms: int,
identity_pool: Optional[str] = None,
):
self.token = None
self.logical_cluster = logical_cluster
@@ -113,11 +114,13 @@ def __init__(
self.token_expiry_threshold = 0.8

def get_bearer_fields(self) -> dict:
return {
fields = {
'bearer.auth.token': self.get_access_token(),
'bearer.auth.logical.cluster': self.logical_cluster,
'bearer.auth.identity.pool.id': self.identity_pool,
}
if self.identity_pool is not None:
fields['bearer.auth.identity.pool.id'] = self.identity_pool
return fields

def token_expired(self) -> bool:
if self.token is None:
@@ -283,20 +286,14 @@ def __init__(self, conf: dict):
self.auth = None

if self.bearer_auth_credentials_source in {'OAUTHBEARER', 'STATIC_TOKEN'}:
headers = ['bearer.auth.logical.cluster', 'bearer.auth.identity.pool.id']
missing_headers = [header for header in headers if header not in conf_copy]
if missing_headers:
raise ValueError(
"Missing required bearer configuration properties: {}".format(", ".join(missing_headers))
)
if 'bearer.auth.logical.cluster' not in conf_copy:
raise ValueError("Missing required bearer configuration property: bearer.auth.logical.cluster")

logical_cluster = conf_copy.pop('bearer.auth.logical.cluster')
if not isinstance(logical_cluster, str):
raise TypeError("logical cluster must be a str, not " + str(type(logical_cluster)))

identity_pool = conf_copy.pop('bearer.auth.identity.pool.id')
if not isinstance(identity_pool, str):
raise TypeError("identity pool id must be a str, not " + str(type(identity_pool)))
identity_pool = normalize_identity_pool(conf_copy.pop('bearer.auth.identity.pool.id', None))

if self.bearer_auth_credentials_source == 'OAUTHBEARER':
properties_list = [
@@ -335,10 +332,10 @@ def __init__(self, conf: dict):
self.scope,
self.token_endpoint,
logical_cluster,
identity_pool,
self.max_retries,
self.retries_wait_ms,
self.retries_max_wait_ms,
identity_pool,
)
else: # STATIC_TOKEN
if 'bearer.auth.token' not in conf_copy:
@@ -412,7 +409,8 @@ def handle_bearer_auth(self, headers: dict) -> None:
if self.bearer_field_provider is None:
raise ValueError("Bearer field provider is not set")
bearer_fields = self.bearer_field_provider.get_bearer_fields()
required_fields = ['bearer.auth.token', 'bearer.auth.identity.pool.id', 'bearer.auth.logical.cluster']
# Note: bearer.auth.identity.pool.id is optional; only token and logical.cluster are required
required_fields = ['bearer.auth.token', 'bearer.auth.logical.cluster']

missing_fields = []
for field in required_fields:
@@ -427,9 +425,11 @@ def handle_bearer_auth(self, headers: dict) -> None:
)

headers["Authorization"] = "Bearer {}".format(bearer_fields['bearer.auth.token'])
headers['Confluent-Identity-Pool-Id'] = bearer_fields['bearer.auth.identity.pool.id']
headers['target-sr-cluster'] = bearer_fields['bearer.auth.logical.cluster']

if 'bearer.auth.identity.pool.id' in bearer_fields:
headers['Confluent-Identity-Pool-Id'] = bearer_fields['bearer.auth.identity.pool.id']

def get(self, url: str, query: Optional[dict] = None) -> Any:
return self.send_request(url, method='GET', query=query)
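
The tightened validation above still requires bearer.auth.logical.cluster while the identity pool may be omitted. A small sketch of the error path, assuming no other required property is missing (values are illustrative):

from confluent_kafka.schema_registry import SchemaRegistryClient

try:
    SchemaRegistryClient({
        'url': 'https://psrc-123456.us-east-1.aws.confluent.cloud',
        'bearer.auth.credentials.source': 'STATIC_TOKEN',
        'bearer.auth.token': 'example-token',
        # 'bearer.auth.logical.cluster' deliberately omitted
    })
except ValueError as err:
    # "Missing required bearer configuration property: bearer.auth.logical.cluster"
    print(err)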

@@ -31,6 +31,7 @@
'is_success',
'is_retriable',
'full_jitter',
'normalize_identity_pool',
'_StaticFieldProvider',
'_AsyncStaticFieldProvider',
'_SchemaCache',
@@ -71,33 +72,37 @@ async def get_bearer_fields(self) -> dict:
class _StaticFieldProvider(_BearerFieldProvider):
"""Synchronous static token bearer field provider."""

def __init__(self, token: str, logical_cluster: str, identity_pool: str):
def __init__(self, token: str, logical_cluster: str, identity_pool: Optional[str] = None):
self.token = token
self.logical_cluster = logical_cluster
self.identity_pool = identity_pool

def get_bearer_fields(self) -> dict:
return {
fields = {
'bearer.auth.token': self.token,
'bearer.auth.logical.cluster': self.logical_cluster,
'bearer.auth.identity.pool.id': self.identity_pool,
}
if self.identity_pool is not None:
fields['bearer.auth.identity.pool.id'] = self.identity_pool
return fields


class _AsyncStaticFieldProvider(_AsyncBearerFieldProvider):
"""Asynchronous static token bearer field provider."""

def __init__(self, token: str, logical_cluster: str, identity_pool: str):
def __init__(self, token: str, logical_cluster: str, identity_pool: Optional[str] = None):
self.token = token
self.logical_cluster = logical_cluster
self.identity_pool = identity_pool

async def get_bearer_fields(self) -> dict:
return {
fields = {
'bearer.auth.token': self.token,
'bearer.auth.logical.cluster': self.logical_cluster,
'bearer.auth.identity.pool.id': self.identity_pool,
}
if self.identity_pool is not None:
fields['bearer.auth.identity.pool.id'] = self.identity_pool
return fields
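
To illustrate the new optional identity pool behavior of these internal providers, a brief sketch of the fields returned within this module (values are illustrative; the async variant behaves the same):

provider = _StaticFieldProvider('example-token', 'lsrc-12345', 'pool-abc,pool-def')
assert provider.get_bearer_fields() == {
    'bearer.auth.token': 'example-token',
    'bearer.auth.logical.cluster': 'lsrc-12345',
    'bearer.auth.identity.pool.id': 'pool-abc,pool-def',
}

provider_no_pool = _StaticFieldProvider('example-token', 'lsrc-12345')
assert provider_no_pool.get_bearer_fields() == {
    'bearer.auth.token': 'example-token',
    'bearer.auth.logical.cluster': 'lsrc-12345',
}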


def is_success(status_code: int) -> bool:
@@ -113,6 +118,35 @@ def full_jitter(base_delay_ms: int, max_delay_ms: int, retries_attempted: int) -
return random.random() * min(no_jitter_delay, max_delay_ms)


def normalize_identity_pool(identity_pool_raw: Any) -> Optional[str]:
"""
Normalize identity pool configuration to a comma-separated string.

Identity pool can be provided as:
- None: Returns None (no identity pool configured)
- str: Returns as-is (single pool ID or already comma-separated)
- list[str]: Joins with commas (multiple pool IDs)

Args:
identity_pool_raw: The raw identity pool configuration value.

Returns:
A comma-separated string of identity pool IDs, or None.

Raises:
TypeError: If identity_pool_raw is not None, str, or list of strings.
"""
if identity_pool_raw is None:
return None
if isinstance(identity_pool_raw, str):
return identity_pool_raw
if isinstance(identity_pool_raw, list):
if not all(isinstance(item, str) for item in identity_pool_raw):
raise TypeError("All items in identity pool list must be strings")
return ",".join(identity_pool_raw)
raise TypeError("identity pool id must be a str or list, not " + str(type(identity_pool_raw)))


class _SchemaCache(object):
"""
Thread-safe cache for use with the Schema Registry Client.