From 07c7cc57d3239b3f7319a70fa41beb448ae83bec Mon Sep 17 00:00:00 2001 From: Clio Date: Wed, 1 Apr 2026 23:08:06 +0800 Subject: [PATCH 1/3] fix(enterprise): persist llm model temperature and max_output_tokens (#247) --- backend/app/api/enterprise.py | 35 ++++++----------------------------- 1 file changed, 6 insertions(+), 29 deletions(-) diff --git a/backend/app/api/enterprise.py b/backend/app/api/enterprise.py index e68484d8..71c1da35 100644 --- a/backend/app/api/enterprise.py +++ b/backend/app/api/enterprise.py @@ -33,30 +33,7 @@ router = APIRouter(prefix="/enterprise", tags=["enterprise"]) -# ─── Public: Check Email Exists ──────────────────────── - -class CheckEmailRequest(BaseModel): - email: str - - -@router.post("/check-email-exists") -async def check_email_exists( - data: CheckEmailRequest, - db: AsyncSession = Depends(get_db), -): - """Public endpoint — check if an email address is already registered on this platform. - - Used by the invitation flow to decide whether to show the login or register form. - Only returns a boolean; does not expose any user data. 
- """ - from app.models.user import Identity - result = await db.execute( - select(Identity).where(Identity.email == data.email.strip().lower()) - ) - exists = result.scalar_one_or_none() is not None - return {"exists": exists} - - +# ─── LLM Model Pool ──────────────────────────────────── @router.get("/llm-providers") async def list_llm_providers( @@ -159,11 +136,11 @@ async def add_llm_model( api_key_encrypted=data.api_key, # TODO: encrypt base_url=data.base_url, label=data.label, + temperature=data.temperature, max_tokens_per_day=data.max_tokens_per_day, enabled=data.enabled, supports_vision=data.supports_vision, max_output_tokens=data.max_output_tokens, - request_timeout=data.request_timeout, tenant_id=uuid.UUID(tid) if tid else None, ) db.add(model) @@ -244,10 +221,10 @@ async def update_llm_model( model.enabled = data.enabled if hasattr(data, 'supports_vision') and data.supports_vision is not None: model.supports_vision = data.supports_vision - if hasattr(data, 'max_output_tokens') and data.max_output_tokens is not None: + if hasattr(data, "temperature") and data.temperature is not None: + model.temperature = data.temperature + if hasattr(data, "max_output_tokens") and data.max_output_tokens is not None: model.max_output_tokens = data.max_output_tokens - if hasattr(data, 'request_timeout') and data.request_timeout is not None: - model.request_timeout = data.request_timeout await db.commit() await db.refresh(model) @@ -1386,7 +1363,7 @@ async def invite_users( db.add(code) codes.append(code) - invite_url = f"{base_url}/login?code={code_str}&email={email}" + invite_url = f"{base_url}/login?code={code_str}" inviter_name = current_user.display_name or current_user.username From 95dd36132dc4d50859089bf71a6a7cdd1f4bb875 Mon Sep 17 00:00:00 2001 From: Clio Date: Wed, 1 Apr 2026 23:10:59 +0800 Subject: [PATCH 2/3] test(enterprise): cover llm model temperature and max_output_tokens persistence --- .../test_enterprise_llm_model_api.py | 119 ++++++++++++++++++ 1 file 
changed, 119 insertions(+) create mode 100644 backend/tests/test_enterprise_llm_model_api.py diff --git a/backend/tests/test_enterprise_llm_model_api.py b/backend/tests/test_enterprise_llm_model_api.py new file mode 100644 index 00000000..ba3754d0 --- /dev/null +++ b/backend/tests/test_enterprise_llm_model_api.py @@ -0,0 +1,119 @@ +import uuid +from datetime import UTC, datetime +from types import SimpleNamespace + +import pytest + +from app.api import enterprise as enterprise_api +from app.models.llm import LLMModel +from app.schemas.schemas import LLMModelCreate, LLMModelUpdate + + +class DummyResult: + def __init__(self, value): + self._value = value + + def scalar_one_or_none(self): + return self._value + + +class RecordingDB: + def __init__(self, *, execute_results=None): + self.execute_results = list(execute_results or []) + self.added = [] + self.flushed = False + self.committed = False + self.refreshed = [] + + def add(self, obj): + self.added.append(obj) + + async def flush(self): + for obj in self.added: + if getattr(obj, "id", None) is None: + obj.id = uuid.uuid4() + if getattr(obj, "created_at", None) is None: + obj.created_at = datetime.now(UTC) + self.flushed = True + + async def execute(self, _statement): + return DummyResult(self.execute_results.pop(0) if self.execute_results else None) + + async def commit(self): + self.committed = True + + async def refresh(self, obj): + if getattr(obj, "id", None) is None: + obj.id = uuid.uuid4() + if getattr(obj, "created_at", None) is None: + obj.created_at = datetime.now(UTC) + self.refreshed.append(obj) + + +@pytest.mark.asyncio +async def test_add_llm_model_persists_temperature_and_max_output_tokens(): + tenant_id = uuid.uuid4() + db = RecordingDB() + current_user = SimpleNamespace(tenant_id=tenant_id) + + created = await enterprise_api.add_llm_model( + data=LLMModelCreate( + 
            provider="openai", + model="gpt-4.1", + api_key="sk-test", + base_url="https://api.example.com/v1", + label="Primary", + temperature=0.4, + max_tokens_per_day=1000, + enabled=True, + supports_vision=False, + max_output_tokens=4096, + ), + tenant_id=str(tenant_id), + current_user=current_user, + db=db, + ) + + assert db.flushed is True + assert len(db.added) == 1 + model = db.added[0] + assert model.temperature == 0.4 + assert model.max_output_tokens == 4096 + assert created.temperature == 0.4 + assert created.max_output_tokens == 4096 + + +@pytest.mark.asyncio +async def test_update_llm_model_persists_temperature_and_max_output_tokens(): + existing = LLMModel( + provider="openai", + model="gpt-4.1", + api_key_encrypted="sk-test", + base_url="https://api.example.com/v1", + label="Primary", + temperature=None, + max_tokens_per_day=1000, + enabled=True, + supports_vision=False, + max_output_tokens=None, + ) + existing.id = uuid.uuid4() + existing.created_at = datetime.now(UTC) + db = RecordingDB(execute_results=[existing]) + + updated = await enterprise_api.update_llm_model( + model_id=existing.id, + data=LLMModelUpdate( + temperature=0.2, + max_output_tokens=2048, + ), + current_user=SimpleNamespace(), + db=db, + ) + + assert db.committed is True + assert db.refreshed == [existing] + assert existing.temperature == 0.2 + assert existing.max_output_tokens == 2048 + assert updated.temperature == 0.2 + assert updated.max_output_tokens == 2048 From 06f4135b74e84f33fa85b9f0bf6358ccf6b96c4e Mon Sep 17 00:00:00 2001 From: Clio Date: Wed, 1 Apr 2026 23:16:07 +0800 Subject: [PATCH 3/3] fix(enterprise): rebase llm model temperature persistence on current main --- backend/app/api/enterprise.py | 32 +++++++++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/backend/app/api/enterprise.py b/backend/app/api/enterprise.py index 71c1da35..e0d79bee 100644 --- a/backend/app/api/enterprise.py +++ b/backend/app/api/enterprise.py @@ 
-33,7 +33,30 @@ router = APIRouter(prefix="/enterprise", tags=["enterprise"]) -# ─── LLM Model Pool ──────────────────────────────────── +# ─── Public: Check Email Exists ──────────────────────── + +class CheckEmailRequest(BaseModel): + email: str + + +@router.post("/check-email-exists") +async def check_email_exists( + data: CheckEmailRequest, + db: AsyncSession = Depends(get_db), +): + """Public endpoint — check if an email address is already registered on this platform. + + Used by the invitation flow to decide whether to show the login or register form. + Only returns a boolean; does not expose any user data. + """ + from app.models.user import Identity + result = await db.execute( + select(Identity).where(Identity.email == data.email.strip().lower()) + ) + exists = result.scalar_one_or_none() is not None + return {"exists": exists} + + @router.get("/llm-providers") async def list_llm_providers( @@ -141,6 +164,7 @@ async def add_llm_model( enabled=data.enabled, supports_vision=data.supports_vision, max_output_tokens=data.max_output_tokens, + request_timeout=data.request_timeout, tenant_id=uuid.UUID(tid) if tid else None, ) db.add(model) @@ -223,8 +247,10 @@ async def update_llm_model( model.supports_vision = data.supports_vision if hasattr(data, "temperature") and data.temperature is not None: model.temperature = data.temperature - if hasattr(data, "max_output_tokens") and data.max_output_tokens is not None: + if hasattr(data, 'max_output_tokens') and data.max_output_tokens is not None: model.max_output_tokens = data.max_output_tokens + if hasattr(data, 'request_timeout') and data.request_timeout is not None: + model.request_timeout = data.request_timeout await db.commit() await db.refresh(model) @@ -1363,7 +1389,7 @@ async def invite_users( db.add(code) codes.append(code) - invite_url = f"{base_url}/login?code={code_str}" + invite_url = f"{base_url}/login?code={code_str}&email={email}" inviter_name = current_user.display_name or current_user.username