diff --git a/backend/alembic/versions/add_subdomain_prefix.py b/backend/alembic/versions/add_subdomain_prefix.py
new file mode 100644
index 00000000..7ba28572
--- /dev/null
+++ b/backend/alembic/versions/add_subdomain_prefix.py
@@ -0,0 +1,27 @@
+"""Add subdomain_prefix to tenants table."""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy import inspect
+from typing import Sequence, Union
+
+revision: str = "add_subdomain_prefix"
+down_revision = ("add_tool_source", "merge_upstream_and_local")
+branch_labels = None
+depends_on = None
+
+
def upgrade() -> None:
    """Add the nullable ``subdomain_prefix`` column to tenants plus a unique index.

    Both operations are guarded by inspector checks so the migration is
    safe to re-run against a database where they already exist.
    """
    bind = op.get_bind()
    insp = inspect(bind)

    existing_columns = {col["name"] for col in insp.get_columns("tenants")}
    if "subdomain_prefix" not in existing_columns:
        op.add_column(
            "tenants",
            sa.Column("subdomain_prefix", sa.String(50), nullable=True),
        )

    existing_indexes = {ix["name"] for ix in insp.get_indexes("tenants")}
    if "ix_tenants_subdomain_prefix" not in existing_indexes:
        op.create_index(
            "ix_tenants_subdomain_prefix",
            "tenants",
            ["subdomain_prefix"],
            unique=True,
        )
+
+
def downgrade() -> None:
    """Drop the unique index and then the ``subdomain_prefix`` column."""
    op.drop_index("ix_tenants_subdomain_prefix", table_name="tenants")
    op.drop_column("tenants", "subdomain_prefix")
diff --git a/backend/alembic/versions/add_tenant_is_default.py b/backend/alembic/versions/add_tenant_is_default.py
new file mode 100644
index 00000000..6052eaa7
--- /dev/null
+++ b/backend/alembic/versions/add_tenant_is_default.py
@@ -0,0 +1,38 @@
+"""Add is_default field to tenants table.
+
+Revision ID: add_tenant_is_default
+Revises: add_subdomain_prefix
+Create Date: 2026-03-30
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+revision = "add_tenant_is_default"
+down_revision = "add_subdomain_prefix"
+branch_labels = None
+depends_on = None
+
+
def upgrade() -> None:
    """Add ``is_default`` to tenants and flag exactly one tenant as default.

    The column add is skipped when a previous run already created it, and
    the backfill runs only while no tenant carries the default flag.
    """
    bind = op.get_bind()

    # 1. Add is_default column (idempotent)
    existing = {col['name'] for col in sa.inspect(bind).get_columns('tenants')}
    if 'is_default' not in existing:
        op.add_column(
            'tenants',
            sa.Column('is_default', sa.Boolean(), nullable=False, server_default='false'),
        )

    # 2. Promote the earliest-created active tenant to default, but only
    #    when no tenant is already marked as default.
    bind.execute(sa.text("""
    UPDATE tenants
    SET is_default = true
    WHERE id = (
        SELECT id FROM tenants WHERE is_active = true ORDER BY created_at ASC LIMIT 1
    )
    AND NOT EXISTS (SELECT 1 FROM tenants WHERE is_default = true)
    """))
+
+
def downgrade() -> None:
    """Drop the ``is_default`` column from tenants (reverses upgrade)."""
    op.drop_column('tenants', 'is_default')
diff --git a/backend/alembic/versions/add_user_source.py b/backend/alembic/versions/add_user_source.py
new file mode 100644
index 00000000..8f469ac4
--- /dev/null
+++ b/backend/alembic/versions/add_user_source.py
@@ -0,0 +1,26 @@
+"""Add source column to users table."""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy import inspect
+
+revision = "add_user_source"
+down_revision = "add_llm_max_output_tokens"
+branch_labels = None
+depends_on = None
+
+
def upgrade():
    """Add a nullable ``source`` column (server default "web") to users, with an index.

    Guarded by inspector checks so a partially-applied run can be retried.
    """
    bind = op.get_bind()
    insp = inspect(bind)

    if "source" not in {col["name"] for col in insp.get_columns("users")}:
        op.add_column(
            "users",
            sa.Column("source", sa.String(50), nullable=True, server_default="web"),
        )

    if "ix_users_source" not in {ix["name"] for ix in insp.get_indexes("users")}:
        op.create_index("ix_users_source", "users", ["source"])
+
+
def downgrade():
    """Remove the ``source`` index and column from users (reverses upgrade)."""
    op.drop_index("ix_users_source", "users")
    op.drop_column("users", "source")
diff --git a/backend/alembic/versions/f8a934bf9f17_merge_identity_refactor_with_tenant_is_.py b/backend/alembic/versions/f8a934bf9f17_merge_identity_refactor_with_tenant_is_.py
new file mode 100644
index 00000000..11ca9f5d
--- /dev/null
+++ b/backend/alembic/versions/f8a934bf9f17_merge_identity_refactor_with_tenant_is_.py
@@ -0,0 +1,21 @@
+"""merge identity refactor with tenant_is_default
+
+Revision ID: f8a934bf9f17
+Revises: add_tenant_is_default, d9cbd43b62e5
+Create Date: 2026-04-02
+"""
+from typing import Sequence, Union
+
+# revision identifiers, used by Alembic.
+revision: str = 'f8a934bf9f17'
+down_revision: Union[str, Sequence[str]] = ('add_tenant_is_default', 'd9cbd43b62e5')
+branch_labels = None
+depends_on = None
+
+
def upgrade() -> None:
    """Merge point only — no schema changes."""
    pass
+
+
def downgrade() -> None:
    """Merge point only — nothing to revert."""
    pass
diff --git a/backend/alembic/versions/merge_heads.py b/backend/alembic/versions/merge_heads.py
new file mode 100644
index 00000000..196c55d1
--- /dev/null
+++ b/backend/alembic/versions/merge_heads.py
@@ -0,0 +1,16 @@
+"""Merge upstream and local migration heads."""
+
+from alembic import op
+
+revision = "merge_upstream_and_local"
+down_revision = ("add_daily_token_usage", "add_user_source")
+branch_labels = None
+depends_on = None
+
+
def upgrade():
    """Merge-only revision joining two heads — no schema operations."""
    pass
+
+
def downgrade():
    """Merge-only revision — nothing to revert."""
    pass
diff --git a/backend/app/api/activity.py b/backend/app/api/activity.py
index a22e5a98..915a8e22 100644
--- a/backend/app/api/activity.py
+++ b/backend/app/api/activity.py
@@ -21,8 +21,11 @@ async def get_agent_activity(
current_user: User = Depends(get_current_user),
db: AsyncSession = Depends(get_db),
):
- """Get recent activity logs for an agent."""
- await check_agent_access(db, current_user, agent_id)
+ """Get recent activity logs for an agent. Only the agent creator can view."""
+ from app.core.permissions import is_agent_creator
+ agent, _access = await check_agent_access(db, current_user, agent_id)
+ if not is_agent_creator(current_user, agent):
+ return []
result = await db.execute(
select(AgentActivityLog)
diff --git a/backend/app/api/admin.py b/backend/app/api/admin.py
index 8921f6bb..a61acfaa 100644
--- a/backend/app/api/admin.py
+++ b/backend/app/api/admin.py
@@ -9,6 +9,7 @@
from datetime import datetime
from fastapi import APIRouter, Depends, HTTPException
+from loguru import logger
from pydantic import BaseModel, Field
from sqlalchemy import func as sqla_func, select
from sqlalchemy.ext.asyncio import AsyncSession
@@ -31,8 +32,10 @@ class CompanyStats(BaseModel):
name: str
slug: str
is_active: bool
+ is_default: bool = False
sso_enabled: bool = False
sso_domain: str | None = None
+ subdomain_prefix: str | None = None
created_at: datetime | None = None
user_count: int = 0
agent_count: int = 0
@@ -117,8 +120,10 @@ async def list_companies(
name=tenant.name,
slug=tenant.slug,
is_active=tenant.is_active,
+ is_default=tenant.is_default,
sso_enabled=tenant.sso_enabled,
sso_domain=tenant.sso_domain,
+ subdomain_prefix=tenant.subdomain_prefix,
created_at=tenant.created_at,
user_count=user_count,
agent_count=agent_count,
@@ -159,6 +164,13 @@ async def create_company(
db.add(invite)
await db.flush()
+ # Seed default agents (Morty & Meeseeks) for the new company
+ try:
+ from app.services.agent_seeder import seed_default_agents
+ await seed_default_agents(tenant_id=tenant.id, creator_id=current_user.id, db=db)
+ except Exception as e:
+ logger.warning(f"[create_company] Failed to seed default agents: {e}")
+
return CompanyCreateResponse(
company=CompanyStats(
id=tenant.id,
@@ -198,6 +210,56 @@ async def toggle_company(
return {"ok": True, "is_active": new_state}
+
@router.get("/companies/{company_id}/invitation-codes")
async def list_company_invitation_codes(
    company_id: uuid.UUID,
    current_user: User = Depends(require_role("platform_admin")),
    db: AsyncSession = Depends(get_db),
):
    """List a company's active invitation codes, newest first.

    Platform-admin only. Returns ``{"codes": [...]}``; the list is empty
    when the company has no active codes (or does not exist).
    """
    result = await db.execute(
        select(InvitationCode)
        .where(InvitationCode.tenant_id == company_id)
        # `.is_(True)` rather than `== True`: SQLAlchemy-idiomatic and
        # avoids the E712 lint violation.
        .where(InvitationCode.is_active.is_(True))
        .order_by(InvitationCode.created_at.desc())
    )
    codes = result.scalars().all()
    return {
        "codes": [
            {
                "id": str(c.id),
                "code": c.code,
                "max_uses": c.max_uses,
                "used_count": c.used_count,
                "is_active": c.is_active,
                "created_at": c.created_at.isoformat() if c.created_at else None,
            }
            for c in codes
        ]
    }
+
+
@router.post("/companies/{company_id}/invitation-codes")
async def create_company_invitation_code(
    company_id: uuid.UUID,
    current_user: User = Depends(require_role("platform_admin")),
    db: AsyncSession = Depends(get_db),
):
    """Create a single-use invitation code for a company.

    Platform-admin only. Returns ``{"code": <str>}``.

    Raises:
        HTTPException(404): when the company does not exist.
    """
    t_result = await db.execute(select(Tenant).where(Tenant.id == company_id))
    if not t_result.scalar_one_or_none():
        raise HTTPException(status_code=404, detail="Company not found")

    # token_urlsafe(12) always yields exactly 16 characters (base64 of 12
    # bytes, unpadded), so no truncation is needed.
    # NOTE: .upper() folds case, reducing entropy — still ample for a
    # single-use invite code, but keep in mind if max_uses ever grows.
    code_str = secrets.token_urlsafe(12).upper()
    invite = InvitationCode(
        code=code_str,
        tenant_id=company_id,
        max_uses=1,
        created_by=current_user.id,
    )
    db.add(invite)
    await db.flush()
    return {"code": code_str}
+
# ─── Platform Metrics Dashboard ─────────────────────────
from typing import Any
@@ -556,3 +618,137 @@ async def update_platform_settings(
await db.flush()
return await get_platform_settings(current_user=current_user, db=db)
+
+
@router.delete("/companies/{company_id}", status_code=204)
async def delete_company(
    company_id: uuid.UUID,
    current_user: User = Depends(require_role("platform_admin")),
    db: AsyncSession = Depends(get_db),
):
    """Permanently delete a company and all associated data.

    Cannot delete the default company.
    Cascade-deletes all related records in dependency order: agent-scoped
    rows first, then user-scoped rows, then tenant-scoped rows, then the
    agents, the users, and finally the tenant row itself. Cross-tenant
    foreign keys pointing at this tenant's agents/users are nulled out
    instead of deleted so other tenants' records survive.

    Platform-admin only. Returns 204 on success; 404 if the company does
    not exist; 400 when the company is the default one.
    """
    # 1. Find the tenant
    result = await db.execute(select(Tenant).where(Tenant.id == company_id))
    tenant = result.scalar_one_or_none()
    if not tenant:
        raise HTTPException(status_code=404, detail="Company not found")

    # 2. Cannot delete default company
    if tenant.is_default:
        raise HTTPException(
            status_code=400,
            detail="Cannot delete the default company. Please set another company as default first."
        )

    # 3. Cascade delete all associated data.
    # Imports are local to avoid widening this module's import surface for
    # a single admin endpoint.
    from sqlalchemy import delete as sa_delete
    from app.models.activity_log import AgentActivityLog, DailyTokenUsage
    from app.models.audit import AuditLog, ApprovalRequest, ChatMessage
    from app.models.channel_config import ChannelConfig
    from app.models.chat_session import ChatSession
    from app.models.gateway_message import GatewayMessage
    from app.models.llm import LLMModel
    from app.models.notification import Notification
    from app.models.org import OrgMember, OrgDepartment, AgentRelationship, AgentAgentRelationship
    from app.models.published_page import PublishedPage
    from app.models.schedule import AgentSchedule
    from app.models.skill import Skill
    from app.models.task import Task
    from app.models.tenant_setting import TenantSetting
    from app.models.trigger import AgentTrigger
    from app.models.tool import AgentTool
    from app.models.identity import IdentityProvider, SSOScanSession

    # 3.1 Collect agent_ids and user_ids for this tenant
    agent_ids_result = await db.execute(
        select(Agent.id).where(Agent.tenant_id == company_id)
    )
    agent_ids = [row[0] for row in agent_ids_result.all()]

    user_ids_result = await db.execute(
        select(User.id).where(User.tenant_id == company_id)
    )
    user_ids = [row[0] for row in user_ids_result.all()]

    # 3.2 Delete tables that reference agents (via agent_id)
    if agent_ids:
        await db.execute(sa_delete(AgentTrigger).where(AgentTrigger.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(AgentSchedule).where(AgentSchedule.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(AgentActivityLog).where(AgentActivityLog.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(ChannelConfig).where(ChannelConfig.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(AgentTool).where(AgentTool.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(Notification).where(Notification.agent_id.in_(agent_ids)))
        # Delete TaskLog before Task (FK: task_id -> tasks.id, no cascade)
        from app.models.task import TaskLog
        task_ids_r = await db.execute(select(Task.id).where(Task.agent_id.in_(agent_ids)))
        task_ids = [row[0] for row in task_ids_r.all()]
        if task_ids:
            await db.execute(sa_delete(TaskLog).where(TaskLog.task_id.in_(task_ids)))
        await db.execute(sa_delete(Task).where(Task.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(AuditLog).where(AuditLog.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(ApprovalRequest).where(ApprovalRequest.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(ChatMessage).where(ChatMessage.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(ChatSession).where(ChatSession.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(GatewayMessage).where(GatewayMessage.agent_id.in_(agent_ids)))
        from app.models.agent import AgentPermission
        await db.execute(sa_delete(AgentPermission).where(AgentPermission.agent_id.in_(agent_ids)))
        # Agent-to-agent relationships may point either direction; clear both.
        await db.execute(sa_delete(AgentAgentRelationship).where(AgentAgentRelationship.agent_id.in_(agent_ids)))
        await db.execute(sa_delete(AgentAgentRelationship).where(AgentAgentRelationship.target_agent_id.in_(agent_ids)))
        await db.execute(sa_delete(AgentRelationship).where(AgentRelationship.agent_id.in_(agent_ids)))

        # Null out cross-tenant FK references (other tenants' records pointing to our agents)
        from sqlalchemy import update as sa_update
        await db.execute(
            sa_update(ChatSession).where(ChatSession.peer_agent_id.in_(agent_ids)).values(peer_agent_id=None)
        )
        await db.execute(
            sa_update(GatewayMessage).where(GatewayMessage.sender_agent_id.in_(agent_ids)).values(sender_agent_id=None)
        )

    # 3.3 Delete tables that reference users but not tenant directly
    if user_ids:
        from app.models.agent import AgentTemplate
        await db.execute(sa_delete(AgentTemplate).where(AgentTemplate.created_by.in_(user_ids)))

    # 3.3b Null out cross-tenant user FK references
    if user_ids:
        from sqlalchemy import update as sa_update
        await db.execute(
            sa_update(GatewayMessage).where(GatewayMessage.sender_user_id.in_(user_ids)).values(sender_user_id=None)
        )

    # 3.4 Delete tables with tenant_id (no agent dependency)
    await db.execute(sa_delete(DailyTokenUsage).where(DailyTokenUsage.tenant_id == company_id))
    await db.execute(sa_delete(PublishedPage).where(PublishedPage.tenant_id == company_id))
    await db.execute(sa_delete(OrgMember).where(OrgMember.tenant_id == company_id))
    await db.execute(sa_delete(OrgDepartment).where(OrgDepartment.tenant_id == company_id))
    await db.execute(sa_delete(InvitationCode).where(InvitationCode.tenant_id == company_id))
    # Delete SkillFile before Skill (FK: skill_id -> skills.id, no cascade)
    from app.models.skill import SkillFile
    skill_ids_r = await db.execute(select(Skill.id).where(Skill.tenant_id == company_id))
    skill_ids = [row[0] for row in skill_ids_r.all()]
    if skill_ids:
        await db.execute(sa_delete(SkillFile).where(SkillFile.skill_id.in_(skill_ids)))
    await db.execute(sa_delete(Skill).where(Skill.tenant_id == company_id))
    await db.execute(sa_delete(LLMModel).where(LLMModel.tenant_id == company_id))
    await db.execute(sa_delete(TenantSetting).where(TenantSetting.tenant_id == company_id))

    # 3.4b Delete identity tables with tenant_id (soft FK)
    await db.execute(sa_delete(IdentityProvider).where(IdentityProvider.tenant_id == company_id))
    await db.execute(sa_delete(SSOScanSession).where(SSOScanSession.tenant_id == company_id))

    # 3.5 Delete agents (after all agent-dependent tables)
    await db.execute(sa_delete(Agent).where(Agent.tenant_id == company_id))

    # 3.6 Delete users (after agents, since agents.creator_id -> users.id)
    await db.execute(sa_delete(User).where(User.tenant_id == company_id))

    # 3.7 Delete the tenant itself
    await db.delete(tenant)
    # NOTE(review): flush, not commit — presumably the session dependency
    # commits on request success. Confirm against get_db's teardown.
    await db.flush()

    return None
diff --git a/backend/app/api/auth.py b/backend/app/api/auth.py
index dfb28a28..eb8b6b68 100644
--- a/backend/app/api/auth.py
+++ b/backend/app/api/auth.py
@@ -369,7 +369,7 @@ async def _handle_normal_register(data: UserRegister, background_tasks: Backgrou
await db.commit()
try:
from app.services.agent_seeder import seed_default_agents
- await seed_default_agents()
+ await seed_default_agents(tenant_id=user.tenant_id, creator_id=user.id)
except Exception as e:
logger.warning(f"Failed to seed default agents: {e}")
@@ -505,7 +505,16 @@ async def login(data: UserLogin, background_tasks: BackgroundTasks, db: AsyncSes
detail="Your organization has been disabled.",
)
- # 6. Generate Token
+ # Tenant-scoped login: when accessing from a company-specific domain,
+ # only allow users who belong to that company (platform_admin exempt)
+ if data.tenant_id and user.role != "platform_admin" and user.tenant_id is not None:
+ if str(user.tenant_id) != str(data.tenant_id):
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="This account does not belong to this organization.",
+ )
+
+ needs_setup = user.tenant_id is None
token = create_access_token(str(user.id), user.role)
return TokenResponse(
access_token=token,
@@ -775,8 +784,8 @@ async def switch_tenant(
# 4. Determine redirect URL
# Determine redirect URL (Priority: sso_domain > ENV > Request > Fallback)
- from app.services.platform_service import platform_service
- redirect_url = await platform_service.get_tenant_sso_base_url(db, tenant, request)
+ from app.core.domain import resolve_base_url
+ redirect_url = await resolve_base_url(db, request=request, tenant_id=str(tenant.id) if tenant else None)
# Include token in redirect URL for cross-domain switching if needed
diff --git a/backend/app/api/chat_sessions.py b/backend/app/api/chat_sessions.py
index 08c9963c..ecec99c3 100644
--- a/backend/app/api/chat_sessions.py
+++ b/backend/app/api/chat_sessions.py
@@ -118,10 +118,10 @@ async def list_sessions(
else:
# Human session — resolve username
user_r = await db.execute(
- select(func.coalesce(User.display_name, User.username))
- .where(User.id == session.user_id)
+ select(User).where(User.id == session.user_id)
)
- display = user_r.scalar_one_or_none() or "Unknown"
+ _user = user_r.scalar_one_or_none()
+ display = (_user.display_name or _user.username or "Unknown") if _user else "Unknown"
out.append(SessionOut(
id=str(session.id),
diff --git a/backend/app/api/dingtalk.py b/backend/app/api/dingtalk.py
index ced8089b..320b72ff 100644
--- a/backend/app/api/dingtalk.py
+++ b/backend/app/api/dingtalk.py
@@ -19,6 +19,59 @@
router = APIRouter(tags=["dingtalk"])
+# --- DingTalk Corp API helpers -----------------------------------------
+import time as _time
+
+
async def _get_corp_access_token(app_key: str, app_secret: str) -> str | None:
    """Fetch a corp access token from the shared DingTalkTokenManager.

    The manager is the same one used by the stream/reaction code paths,
    so token caching is shared across all DingTalk integrations.
    """
    from app.services.dingtalk_token import dingtalk_token_manager

    token = await dingtalk_token_manager.get_token(app_key, app_secret)
    return token
+
+
async def _get_dingtalk_user_detail(
    app_key: str,
    app_secret: str,
    staff_id: str,
) -> dict | None:
    """Query DingTalk user detail via corp API to get unionId/mobile/email.

    Uses /topapi/v2/user/get, requires contact.user.read permission.

    Returns:
        A dict with ``unionid``, ``mobile``, ``email`` (corp email falling
        back to ``org_email``) and the raw ``org_email`` — or None on any
        failure (graceful degradation: callers treat a missing detail as
        "unknown user").
    """
    import httpx

    try:
        access_token = await _get_corp_access_token(app_key, app_secret)
        if not access_token:
            return None

        async with httpx.AsyncClient(timeout=10) as client:
            user_resp = await client.post(
                "https://oapi.dingtalk.com/topapi/v2/user/get",
                params={"access_token": access_token},
                json={"userid": staff_id, "language": "zh_CN"},
            )
            user_data = user_resp.json()

        if user_data.get("errcode") != 0:
            logger.warning(
                f"[DingTalk] /topapi/v2/user/get failed for {staff_id}: "
                f"errcode={user_data.get('errcode')} errmsg={user_data.get('errmsg')}"
            )
            return None

        result = user_data.get("result", {})
        return {
            "unionid": result.get("unionid", ""),
            "mobile": result.get("mobile", ""),
            "email": result.get("email", "") or result.get("org_email", ""),
            # Callers also probe "org_email" directly; previously this key
            # was never present, making those lookups dead — expose it.
            "org_email": result.get("org_email", ""),
        }

    except Exception as e:
        logger.warning(f"[DingTalk] _get_dingtalk_user_detail error for {staff_id}: {e}")
        return None
+
# ─── Config CRUD ────────────────────────────────────────
@@ -80,7 +133,7 @@ async def configure_dingtalk_channel(
extra_config={"connection_mode": conn_mode},
)
db.add(config)
- await db.flush()
+ await db.commit()
# Start Stream client if in websocket mode
if conn_mode == "websocket":
@@ -136,6 +189,60 @@ async def delete_dingtalk_channel(
asyncio.create_task(dingtalk_stream_manager.stop_client(agent_id))
+# ─── Message Dedup (prevent duplicate handling of DingTalk retransmissions) ─────────────
+
# In-memory fallback state for message dedup (used when Redis is unavailable).
_processed_messages: dict[str, float] = {}  # {message_id: unix timestamp when first seen}
_dedup_check_counter: int = 0  # call counter driving periodic cleanup


async def _check_message_dedup(message_id: str) -> bool:
    """Check whether ``message_id`` has already been processed recently.

    Returns True for a duplicate (caller should skip the message), False
    otherwise. Primary store is Redis SET NX with a 300s TTL (atomic,
    cross-process); falls back to an in-process dict when Redis is down.
    """
    global _dedup_check_counter

    if not message_id:
        # Nothing to key on — treat the message as never seen.
        return False

    # Try Redis first
    try:
        from app.core.events import get_redis
        redis_client = await get_redis()
        dedup_key = f"dingtalk:dedup:{message_id}"
        # SETNX + EX: set only if not exists, expire in 300s
        was_set = await redis_client.set(dedup_key, "1", ex=300, nx=True)
        if not was_set:
            logger.info(f"[DingTalk Dedup] Duplicate message_id={message_id} (Redis)")
            return True
        return False
    except Exception as e:
        # Previously swallowed silently; log so Redis outages are visible.
        logger.debug(f"[DingTalk Dedup] Redis unavailable, using memory fallback: {e}")

    # In-memory fallback (single event loop, so check-then-set is race-free here)
    import time as _time_dedup
    now = _time_dedup.time()

    if message_id in _processed_messages:
        if now - _processed_messages[message_id] < 300:  # 5 minutes
            logger.info(f"[DingTalk Dedup] Duplicate message_id={message_id} (memory)")
            return True

    # Record (or refresh an expired) first-seen timestamp.
    _processed_messages[message_id] = now

    # Periodic cleanup (every 100 checks, remove entries older than 10 minutes)
    _dedup_check_counter += 1
    if _dedup_check_counter % 100 == 0:
        cutoff = now - 600
        expired = [k for k, v in _processed_messages.items() if v < cutoff]
        for k in expired:
            del _processed_messages[k]
        if expired:
            logger.debug(f"[DingTalk Dedup] Cleaned {len(expired)} expired entries")

    return False
+
+
# ─── Message Processing (called by Stream callback) ────
async def process_dingtalk_message(
@@ -145,14 +252,31 @@ async def process_dingtalk_message(
conversation_id: str,
conversation_type: str,
session_webhook: str,
+ image_base64_list: list[str] | None = None,
+ saved_file_paths: list[str] | None = None,
+ sender_nick: str = "",
+ message_id: str = "",
+ sender_id: str = "",
):
- """Process an incoming DingTalk bot message and reply via session webhook."""
+ """Process an incoming DingTalk bot message and reply via session webhook.
+
+ Args:
+ image_base64_list: List of base64-encoded image data URIs for vision LLM.
+ saved_file_paths: List of local file paths where media files were saved.
+ """
+ # ── Dedup check: skip if this message_id was already processed ──
+ if await _check_message_dedup(message_id):
+ logger.info(f"[DingTalk] Skipping duplicate message_id={message_id}")
+ return
+
import json
import httpx
from datetime import datetime, timezone
from sqlalchemy import select as _select
from app.database import async_session
from app.models.agent import Agent as AgentModel
+ from app.models.user import User as UserModel
+ from sqlalchemy.orm import selectinload as _selectinload
from app.models.audit import ChatMessage
from app.services.channel_session import find_or_create_channel_session
from app.services.channel_user_service import channel_user_service
@@ -166,7 +290,7 @@ async def process_dingtalk_message(
logger.warning(f"[DingTalk] Agent {agent_id} not found")
return
creator_id = agent_obj.creator_id
- ctx_size = agent_obj.context_window_size if agent_obj else 20
+ ctx_size = agent_obj.context_window_size if agent_obj else 100
# Determine conv_id for session isolation
if conversation_type == "2":
@@ -176,16 +300,221 @@ async def process_dingtalk_message(
# P2P / single chat
conv_id = f"dingtalk_p2p_{sender_staff_id}"
- # Resolve channel user via unified service (uses OrgMember + SSO patterns)
- platform_user = await channel_user_service.resolve_channel_user(
- db=db,
- agent=agent_obj,
- channel_type="dingtalk",
- external_user_id=sender_staff_id,
- extra_info={"unionid": sender_staff_id},
+ # -- Load ChannelConfig early for DingTalk corp API calls --
+ _early_cfg_r = await db.execute(
+ _select(ChannelConfig).where(
+ ChannelConfig.agent_id == agent_id,
+ ChannelConfig.channel_type == "dingtalk",
+ )
+ )
+ _early_cfg = _early_cfg_r.scalar_one_or_none()
+ _early_app_key = _early_cfg.app_id if _early_cfg else None
+ _early_app_secret = _early_cfg.app_secret if _early_cfg else None
+
+ # -- Multi-dimension user matching (optimized: local-first, API-last) --
+ from app.models.org import OrgMember
+ from app.models.identity import IdentityProvider
+ from sqlalchemy import or_ as _or
+
+ dt_username = f"dingtalk_{sender_staff_id}"
+ platform_user = None
+ dt_unionid = ""
+ dt_mobile = ""
+ dt_email = ""
+ matched_via = ""
+
+ # Find the DingTalk identity provider for this tenant
+ _ip_r = await db.execute(
+ _select(IdentityProvider).where(
+ IdentityProvider.provider_type == "dingtalk",
+ IdentityProvider.tenant_id == agent_obj.tenant_id,
+ )
)
+ _dingtalk_provider = _ip_r.scalar_one_or_none()
+
+ # Step 1: Match via sender_staff_id in org_members.external_id (企业 userId,最稳定)
+ if sender_staff_id and _dingtalk_provider and not platform_user:
+ _om_r = await db.execute(
+ _select(OrgMember).where(
+ OrgMember.provider_id == _dingtalk_provider.id,
+ OrgMember.external_id == sender_staff_id,
+ OrgMember.status == "active",
+ )
+ )
+ _om = _om_r.scalar_one_or_none()
+ if _om and _om.user_id:
+ _u_r = await db.execute(_select(UserModel).where(UserModel.id == _om.user_id).options(_selectinload(UserModel.identity)))
+ platform_user = _u_r.scalar_one_or_none()
+ if platform_user:
+ matched_via = "org_member.external_id(staff_id)"
+ logger.info(f"[DingTalk] Step1: Matched user via staff_id {sender_staff_id}: {platform_user.username}")
+
+ # Step 2: Match via username = dingtalk_{staffId} (兼容旧用户)
+ if sender_staff_id and not platform_user:
+ from app.models.user import Identity as _IdentityModel
+ from sqlalchemy.orm import selectinload as _selectinload
+ _u_r = await db.execute(
+ _select(UserModel).join(UserModel.identity).where(_IdentityModel.username == dt_username).options(_selectinload(UserModel.identity))
+ )
+ platform_user = _u_r.scalar_one_or_none()
+ if platform_user:
+ matched_via = "username"
+ logger.info(f"[DingTalk] Step2: Matched user via username {dt_username}")
+
+ # Step 3: Call DingTalk API to get unionId/mobile/email (仅首次未匹配时)
+ if not platform_user and _early_app_key and _early_app_secret and sender_staff_id:
+ dt_user_detail = await _get_dingtalk_user_detail(
+ _early_app_key, _early_app_secret, sender_staff_id
+ )
+ if dt_user_detail:
+ dt_unionid = dt_user_detail.get("unionid", "")
+ dt_mobile = dt_user_detail.get("mobile", "")
+ dt_email = dt_user_detail.get("email", "") or dt_user_detail.get("org_email", "")
+ logger.info(f"[DingTalk] Step3: user_detail for {sender_staff_id}: unionid={dt_unionid}, mobile={dt_mobile}, email={dt_email}")
+
+ # 3a: unionId 查 org_members(跨通道匹配 SSO 用户)
+ if dt_unionid and _dingtalk_provider and not platform_user:
+ _om_r = await db.execute(
+ _select(OrgMember).where(
+ OrgMember.provider_id == _dingtalk_provider.id,
+ OrgMember.status == "active",
+ _or(
+ OrgMember.unionid == dt_unionid,
+ OrgMember.external_id == dt_unionid,
+ ),
+ )
+ )
+ _om = _om_r.scalar_one_or_none()
+ if _om and _om.user_id:
+ _u_r = await db.execute(_select(UserModel).where(UserModel.id == _om.user_id).options(_selectinload(UserModel.identity)))
+ platform_user = _u_r.scalar_one_or_none()
+ if platform_user:
+ matched_via = "org_member.unionid"
+ logger.info(f"[DingTalk] Step3a: Matched user via unionid {dt_unionid}: {platform_user.username}")
+
+ # 3b: mobile 匹配
+ if dt_mobile and not platform_user:
+ _u_r = await db.execute(
+ _select(UserModel).join(UserModel.identity).where(
+ _IdentityModel.phone == dt_mobile,
+ UserModel.tenant_id == agent_obj.tenant_id,
+ ).options(_selectinload(UserModel.identity))
+ )
+ platform_user = _u_r.scalar_one_or_none()
+ if platform_user:
+ matched_via = "mobile"
+ logger.info(f"[DingTalk] Step3b: Matched user via mobile: {platform_user.username}")
+
+ # 3c: email 匹配
+ if dt_email and not platform_user:
+ _u_r = await db.execute(
+ _select(UserModel).join(UserModel.identity).where(
+ _IdentityModel.email == dt_email,
+ UserModel.tenant_id == agent_obj.tenant_id,
+ ).options(_selectinload(UserModel.identity))
+ )
+ platform_user = _u_r.scalar_one_or_none()
+ if platform_user:
+ matched_via = "email"
+ logger.info(f"[DingTalk] Step3c: Matched user via email: {platform_user.username}")
+
+
+ # Step 4: No match found — create new user
+ if not platform_user:
+ import uuid as _uuid
+ from app.services.registration_service import registration_service as _reg_svc
+ # Create or find Identity first
+ _identity = await _reg_svc.find_or_create_identity(
+ db,
+ email=dt_email or f"{dt_username}@dingtalk.local",
+ phone=dt_mobile or None,
+ username=dt_username,
+ password=_uuid.uuid4().hex,
+ )
+ # Create tenant-scoped User
+ platform_user = UserModel(
+ identity_id=_identity.id,
+ display_name=sender_nick or f"DingTalk {sender_staff_id[:8]}",
+ role="member",
+ tenant_id=agent_obj.tenant_id if agent_obj else None,
+ source="dingtalk",
+ is_active=True,
+ )
+ db.add(platform_user)
+ await db.flush()
+ platform_user.identity = _identity
+ matched_via = "created"
+ logger.info(f"[DingTalk] Step4: Created new user: {dt_username}")
+ else:
+ # Update display_name, source, mobile, email for existing users
+ updated = False
+ if sender_nick and platform_user.display_name != sender_nick:
+ platform_user.display_name = sender_nick
+ updated = True
+ if not platform_user.source or platform_user.source == "web":
+ platform_user.source = "dingtalk"
+ updated = True
+ # 补充 mobile/email(通讯录获取的信息写入已有用户的 Identity)
+ if platform_user.identity:
+ if dt_mobile and not platform_user.identity.phone:
+ platform_user.identity.phone = dt_mobile
+ updated = True
+ if dt_email and (not platform_user.identity.email or platform_user.identity.email.endswith((".local",))):
+ platform_user.identity.email = dt_email
+ updated = True
+ if updated:
+ await db.flush()
+
+ # -- Ensure org_member record exists (for future Step 1 fast-path) --
+ if _dingtalk_provider and sender_staff_id:
+ _om_check_r = await db.execute(
+ _select(OrgMember).where(
+ OrgMember.user_id == platform_user.id,
+ OrgMember.provider_id == _dingtalk_provider.id,
+ )
+ )
+ _existing_om = _om_check_r.scalar_one_or_none()
+ if not _existing_om:
+ # Create org_member so next message hits Step 1 directly
+ _new_om = OrgMember(
+ user_id=platform_user.id,
+ provider_id=_dingtalk_provider.id,
+ external_id=sender_staff_id,
+ unionid=dt_unionid or None,
+ name=sender_nick or platform_user.display_name or dt_username,
+ status="active",
+ tenant_id=agent_obj.tenant_id,
+ )
+ db.add(_new_om)
+ await db.flush()
+ logger.info(f"[DingTalk] Created org_member for user {platform_user.username}, external_id={sender_staff_id}")
+ elif _existing_om.external_id != sender_staff_id:
+ # Update external_id to sender_staff_id (企业 userId)
+ _existing_om.external_id = sender_staff_id
+ if dt_unionid and not _existing_om.unionid:
+ _existing_om.unionid = dt_unionid
+ await db.flush()
+ logger.info(f"[DingTalk] Updated org_member external_id to {sender_staff_id} for user {platform_user.username}")
+
platform_user_id = platform_user.id
+ # Check for channel commands (/new, /reset)
+ from app.services.channel_commands import is_channel_command, handle_channel_command
+ if is_channel_command(user_text):
+ cmd_result = await handle_channel_command(
+ db=db, command=user_text, agent_id=agent_id,
+ user_id=platform_user_id, external_conv_id=conv_id,
+ source_channel="dingtalk",
+ )
+ await db.commit()
+ import httpx as _httpx_cmd
+ async with _httpx_cmd.AsyncClient(timeout=10) as _cl_cmd:
+ await _cl_cmd.post(session_webhook, json={
+ "msgtype": "text",
+ "text": {"content": cmd_result["message"]},
+ })
+ return
+
# Find or create session
sess = await find_or_create_channel_session(
db=db,
@@ -206,23 +535,157 @@ async def process_dingtalk_message(
)
history = [{"role": m.role, "content": m.content} for m in reversed(history_r.scalars().all())]
- # Save user message
+ # Re-hydrate historical images for multi-turn LLM context
+ from app.services.image_context import rehydrate_image_messages
+ history = rehydrate_image_messages(history, agent_id, max_images=3)
+
+ # Save user message — use display-friendly format for DB (no base64)
+ # Build saved_content: [file:name] prefix for each saved file + clean text
+ import re as _re_dt
+ _clean_text = _re_dt.sub(
+ r'\[image_data:data:image/[^;]+;base64,[A-Za-z0-9+/=]+\]',
+ "", user_text,
+ ).strip()
+ if saved_file_paths:
+ from pathlib import Path as _PathDT
+ _file_prefixes = "\n".join(
+ f"[file:{_PathDT(p).name}]" for p in saved_file_paths
+ )
+ saved_content = f"{_file_prefixes}\n{_clean_text}".strip() if _clean_text else _file_prefixes
+ else:
+ saved_content = _clean_text or user_text
db.add(ChatMessage(
agent_id=agent_id, user_id=platform_user_id,
- role="user", content=user_text,
+ role="user", content=saved_content,
conversation_id=session_conv_id,
))
sess.last_message_at = datetime.now(timezone.utc)
await db.commit()
+ # ── Set up channel_file_sender so the agent can send files via DingTalk ──
+ from app.services.agent_tools import channel_file_sender as _cfs
+ from app.services.dingtalk_stream import (
+ _upload_dingtalk_media,
+ _send_dingtalk_media_message,
+ )
+
+ # Load DingTalk credentials from ChannelConfig
+ _dt_cfg_r = await db.execute(
+ _select(ChannelConfig).where(
+ ChannelConfig.agent_id == agent_id,
+ ChannelConfig.channel_type == "dingtalk",
+ )
+ )
+ _dt_cfg = _dt_cfg_r.scalar_one_or_none()
+ _dt_app_key = _dt_cfg.app_id if _dt_cfg else None
+ _dt_app_secret = _dt_cfg.app_secret if _dt_cfg else None
+
+ _cfs_token = None
+ if _dt_app_key and _dt_app_secret:
+ # Determine send target: group → conversation_id, P2P → sender_staff_id
+ _dt_target_id = conversation_id if conversation_type == "2" else sender_staff_id
+ _dt_conv_type = conversation_type
+
+ async def _dingtalk_file_sender(file_path: str, msg: str = ""):
+ """Send a file/image/video via DingTalk proactive message API."""
+ from pathlib import Path as _P
+
+ _fp = _P(file_path)
+ _ext = _fp.suffix.lower()
+
+ # Determine media type from extension
+ if _ext in (".jpg", ".jpeg", ".png", ".gif", ".bmp", ".webp"):
+ _media_type = "image"
+ elif _ext in (".mp4", ".mov", ".avi", ".mkv"):
+ _media_type = "video"
+ elif _ext in (".mp3", ".wav", ".ogg", ".amr", ".m4a"):
+ _media_type = "voice"
+ else:
+ _media_type = "file"
+
+ # Upload media to DingTalk
+ _mid = await _upload_dingtalk_media(
+ _dt_app_key, _dt_app_secret, file_path, _media_type
+ )
+
+ if _mid:
+ # Send via proactive message API
+ _ok = await _send_dingtalk_media_message(
+ _dt_app_key, _dt_app_secret,
+ _dt_target_id, _mid, _media_type,
+ _dt_conv_type, filename=_fp.name,
+ )
+ if _ok:
+ # Also send accompany text if provided
+ if msg:
+ try:
+ async with httpx.AsyncClient(timeout=10) as _cl:
+ await _cl.post(session_webhook, json={
+ "msgtype": "text",
+ "text": {"content": msg},
+ })
+ except Exception:
+ pass
+ return
+
+ # Fallback: send a text message with download link
+ from pathlib import Path as _P2
+ from app.config import get_settings as _gs_fallback
+ _fs = _gs_fallback()
+ _base_url = getattr(_fs, 'BASE_URL', '').rstrip('/') or ''
+ _fp2 = _P2(file_path)
+ _ws_root = _P2(_fs.AGENT_DATA_DIR)
+ try:
+ _rel = str(_fp2.relative_to(_ws_root / str(agent_id)))
+ except ValueError:
+ _rel = _fp2.name
+ _fallback_parts = []
+ if msg:
+ _fallback_parts.append(msg)
+ if _base_url:
+ _dl_url = f"{_base_url}/api/agents/{agent_id}/files/download?path={_rel}"
+ _fallback_parts.append(f"📎 {_fp2.name}\n🔗 {_dl_url}")
+ _fallback_parts.append("⚠️ 文件通过钉钉直接发送失败,请通过上方链接下载。")
+ try:
+ async with httpx.AsyncClient(timeout=10) as _cl:
+ await _cl.post(session_webhook, json={
+ "msgtype": "text",
+ "text": {"content": "\n\n".join(_fallback_parts)},
+ })
+ except Exception as _fb_err:
+ logger.error(f"[DingTalk] Fallback file text also failed: {_fb_err}")
+
+ _cfs_token = _cfs.set(_dingtalk_file_sender)
+
# Call LLM
- reply_text = await _call_agent_llm(
- db, agent_id, user_text,
- history=history, user_id=platform_user_id,
+ try:
+ reply_text = await _call_agent_llm(
+ db, agent_id, user_text,
+ history=history, user_id=platform_user_id,
+ )
+ finally:
+ # Reset ContextVar
+ if _cfs_token is not None:
+ _cfs.reset(_cfs_token)
+ # Recall thinking reaction (before sending reply)
+ if message_id and _dt_app_key:
+ try:
+ from app.services.dingtalk_reaction import recall_thinking_reaction
+ await recall_thinking_reaction(
+ _dt_app_key, _dt_app_secret,
+ message_id, conversation_id,
+ )
+ except Exception as _recall_err:
+ logger.warning(f"[DingTalk] Failed to recall thinking reaction: {_recall_err}")
+
+ has_media = bool(image_base64_list or saved_file_paths)
+ logger.info(
+ f"[DingTalk] LLM reply ({('media' if has_media else 'text')} input): "
+ f"{reply_text[:100]}"
)
- logger.info(f"[DingTalk] LLM reply: {reply_text[:100]}")
# Reply via session webhook (markdown)
+ # Note: File/image sending is handled by channel_file_sender ContextVar above.
try:
async with httpx.AsyncClient(timeout=10) as client:
await client.post(session_webhook, json={
diff --git a/backend/app/api/discord_bot.py b/backend/app/api/discord_bot.py
index da2ee4fe..27336488 100644
--- a/backend/app/api/discord_bot.py
+++ b/backend/app/api/discord_bot.py
@@ -291,7 +291,7 @@ async def handle_in_background():
agent_r = await bg_db.execute(select(AgentModel).where(AgentModel.id == agent_id))
agent_obj = agent_r.scalar_one_or_none()
creator_id = agent_obj.creator_id if agent_obj else agent_id
- ctx_size = agent_obj.context_window_size if agent_obj else 20
+ ctx_size = agent_obj.context_window_size if agent_obj else 100
# Find-or-create platform user for this Discord sender via unified service
from app.services.channel_user_service import channel_user_service
diff --git a/backend/app/api/enterprise.py b/backend/app/api/enterprise.py
index e68484d8..81343839 100644
--- a/backend/app/api/enterprise.py
+++ b/backend/app/api/enterprise.py
@@ -19,6 +19,7 @@
 from app.models.agent import Agent
 from app.models.llm import LLMModel
 from app.models.audit import AuditLog, ApprovalRequest, EnterpriseInfo
+from app.schemas.oauth2 import OAuth2ProviderCreate, OAuth2ProviderUpdate, OAuth2Config
from app.schemas.schemas import (
ApprovalAction, ApprovalRequestOut, AuditLogOut, EnterpriseInfoOut,
EnterpriseInfoUpdate, LLMModelCreate, LLMModelOut, LLMModelUpdate,
@@ -28,6 +30,7 @@
from app.services.enterprise_sync import enterprise_sync_service
from app.services.llm_utils import get_provider_manifest
from app.services.platform_service import platform_service
+from app.core.domain import resolve_base_url
from app.services.sso_service import sso_service
router = APIRouter(prefix="/enterprise", tags=["enterprise"])
@@ -680,8 +683,8 @@ async def _sync_tenant_sso_state(db: AsyncSession, tenant_id: uuid.UUID):
tenant.sso_enabled = active_sso_count > 0
# Auto-assign subdomain on first SSO enablement based on Platform rules
- if tenant.sso_enabled and not tenant.sso_domain:
- sso_base = await platform_service.get_tenant_sso_base_url(db, tenant)
+ if tenant.sso_enabled and not tenant.sso_domain and not getattr(tenant, 'is_default', False):
+ sso_base = await resolve_base_url(db, tenant_id=str(tenant.id) if tenant else None)
host = sso_base.split("://")[-1].split(":")[0].split("/")[0]
is_ip = platform_service.is_ip_address(host)
@@ -708,7 +711,7 @@ async def _regenerate_all_sso_domains(db: AsyncSession):
The first SSO-enabled tenant keeps it; all others get sso_domain=None.
If no SSO-enabled tenant exists, the first tenant in the list gets it.
"""
- base_url = await platform_service.get_public_base_url(db)
+ base_url = await resolve_base_url(db)
host = base_url.split("://")[-1].split(":")[0].split("/")[0]
is_ip = platform_service.is_ip_address(host)
@@ -722,14 +725,17 @@ async def _regenerate_all_sso_domains(db: AsyncSession):
if is_ip:
# IP mode: only one tenant can have SSO domain
if i == 0:
- sso_base = await platform_service.get_tenant_sso_base_url(db, tenant)
+ sso_base = await resolve_base_url(db, tenant_id=str(tenant.id) if tenant else None)
tenant.sso_domain = sso_base
else:
tenant.sso_domain = None
else:
- # Domain mode: each tenant gets their own subdomain
- sso_base = await platform_service.get_tenant_sso_base_url(db, tenant)
- tenant.sso_domain = sso_base
+ # Domain mode: each tenant gets their own subdomain (skip default tenant)
+ if getattr(tenant, 'is_default', False):
+ tenant.sso_domain = None
+ else:
+ sso_base = await resolve_base_url(db, tenant_id=str(tenant.id) if tenant else None)
+ tenant.sso_domain = sso_base
logger.info(f"[SSO regen] tenant={tenant.slug} sso_domain={tenant.sso_domain}")
if tenants:
@@ -782,6 +788,13 @@ class IdentityProviderCreate(BaseModel):
tenant_id: uuid.UUID | None = None
+class IdentityProviderUpdate(BaseModel):
+ name: str | None = None
+ is_active: bool | None = None
+ sso_login_enabled: bool | None = None
+ config: dict | None = None
+
+
class OAuth2Config(BaseModel):
"""OAuth2 provider configuration with friendly field names."""
app_id: str | None = None # Alias for client_id
@@ -790,6 +803,7 @@ class OAuth2Config(BaseModel):
token_url: str | None = None # OAuth2 token endpoint
user_info_url: str | None = None # OAuth2 user info endpoint
scope: str | None = "openid profile email"
+ field_mapping: dict | None = None # Custom field name mapping
def to_config_dict(self) -> dict:
"""Convert to config dict with both naming conventions for compatibility."""
@@ -808,6 +822,8 @@ def to_config_dict(self) -> dict:
config["user_info_url"] = self.user_info_url
if self.scope:
config["scope"] = self.scope
+ if self.field_mapping is not None:
+ config["field_mapping"] = self.field_mapping
return config
@classmethod
@@ -820,6 +836,7 @@ def from_config_dict(cls, config: dict) -> "OAuth2Config":
token_url=config.get("token_url"),
user_info_url=config.get("user_info_url"),
scope=config.get("scope"),
+ field_mapping=config.get("field_mapping"),
)
@@ -834,6 +851,7 @@ class IdentityProviderOAuth2Create(BaseModel):
token_url: str
user_info_url: str
scope: str | None = "openid profile email"
+ field_mapping: dict | None = None # Custom field name mapping
tenant_id: uuid.UUID | None = None
@@ -920,74 +938,50 @@ async def create_identity_provider(
@router.post("/identity-providers/oauth2", response_model=IdentityProviderOut)
async def create_oauth2_provider(
- data: IdentityProviderOAuth2Create,
+ data: OAuth2ProviderCreate,
current_user: User = Depends(get_current_admin),
db: AsyncSession = Depends(get_db),
):
- """Create a new OAuth2 identity provider with simplified fields (app_id, app_secret, authorize_url, etc.)."""
- # Convert to config dict
- oauth_config = OAuth2Config(
- app_id=data.app_id,
- app_secret=data.app_secret,
- authorize_url=data.authorize_url,
- token_url=data.token_url,
- user_info_url=data.user_info_url,
- scope=data.scope,
- )
- config = oauth_config.to_config_dict()
-
- # Validate
- validate_provider_config("oauth2", config)
-
- # Validate and determine tenant_id
- tid = data.tenant_id
- if current_user.role == "platform_admin":
- # Platform admins can use any tenant_id (including None for global providers)
- pass
- else:
- # Non-platform admins: use request tenant_id if provided, else fall back to user's tenant
- if tid is None:
- tid = current_user.tenant_id
- elif str(tid) != str(current_user.tenant_id):
- # Validate they can only manage their own tenant
- raise HTTPException(status_code=403, detail="Can only create providers for your own tenant")
-
+ """Create OAuth2 provider - strict config format only."""
+ # Validate tenant
+    tid = data.tenant_id or (str(current_user.tenant_id) if current_user.tenant_id else None)
+    # Non-platform admins may only create providers for their own tenant
+    # (this guard existed in the replaced implementation and must not be dropped)
+    if current_user.role != "platform_admin" and tid and str(tid) != str(current_user.tenant_id):
+        raise HTTPException(status_code=403, detail="Can only create providers for your own tenant")
if not tid:
- raise HTTPException(status_code=400, detail="tenant_id is required to create an identity provider")
+ raise HTTPException(status_code=400, detail="tenant_id is required")
+ # Build config dict from validated model
+ config_dict = data.config.model_dump(mode='json', exclude_unset=True)
+
+ # Handle field_mapping: None means no mapping, empty dict also means None
+ if config_dict.get('field_mapping') == {}:
+ config_dict['field_mapping'] = None
+
provider = IdentityProvider(
provider_type="oauth2",
name=data.name,
is_active=data.is_active,
- config=config,
- tenant_id=tid
+ sso_login_enabled=data.sso_login_enabled,
+ config=config_dict,
+ tenant_id=uuid.UUID(tid) if tid else None,
)
db.add(provider)
await db.commit()
await db.refresh(provider)
- return IdentityProviderOut.model_validate(provider)
+ # Sync tenant SSO state
+ if provider.tenant_id:
+ await _sync_tenant_sso_state(db, provider.tenant_id)
-class OAuth2ConfigUpdate(BaseModel):
- """OAuth2 provider configuration update with dedicated fields."""
- name: str | None = None
- is_active: bool | None = None
- app_id: str | None = None
- app_secret: str | None = None # Set to None to keep existing, empty to clear
- authorize_url: str | None = None
- token_url: str | None = None
- user_info_url: str | None = None
- scope: str | None = None
+ return IdentityProviderOut.model_validate(provider)
@router.patch("/identity-providers/{provider_id}/oauth2", response_model=IdentityProviderOut)
async def update_oauth2_provider(
provider_id: uuid.UUID,
- data: OAuth2ConfigUpdate,
+ data: OAuth2ProviderUpdate,
current_user: User = Depends(get_current_admin),
db: AsyncSession = Depends(get_db),
):
- """Update an OAuth2 identity provider with simplified fields."""
+ """Update OAuth2 provider - config format only, no backward compatibility."""
result = await db.execute(select(IdentityProvider).where(IdentityProvider.id == provider_id))
provider = result.scalar_one_or_none()
if not provider:
@@ -999,37 +993,32 @@ async def update_oauth2_provider(
if current_user.role != "platform_admin" and provider.tenant_id != current_user.tenant_id:
raise HTTPException(status_code=403, detail="Not authorized to update this provider")
- # Update name and is_active
+ # Update basic fields
if data.name is not None:
provider.name = data.name
if data.is_active is not None:
provider.is_active = data.is_active
+ if data.sso_login_enabled is not None:
+ provider.sso_login_enabled = data.sso_login_enabled
- # Update config fields
- if any([data.app_id, data.app_secret is not None, data.authorize_url, data.token_url, data.user_info_url, data.scope]):
+ # Update config if provided
+ if data.config is not None:
+ config_dict = data.config.model_dump(mode='json', exclude_unset=True)
current_config = provider.config.copy()
-
- if data.app_id is not None:
- current_config["app_id"] = data.app_id
- current_config["client_id"] = data.app_id
- if data.app_secret is not None:
- # Only update if explicitly set (not None) - allows clearing
- if data.app_secret:
- current_config["app_secret"] = data.app_secret
- current_config["client_secret"] = data.app_secret
- else:
- current_config.pop("app_secret", None)
- current_config.pop("client_secret", None)
- if data.authorize_url is not None:
- current_config["authorize_url"] = data.authorize_url
- if data.token_url is not None:
- current_config["token_url"] = data.token_url
- if data.user_info_url is not None:
- current_config["user_info_url"] = data.user_info_url
- if data.scope is not None:
- current_config["scope"] = data.scope
-
- # Validate the updated config
+
+ # Merge config fields
+ for key, value in config_dict.items():
+ if key == 'field_mapping':
+ # field_mapping: None = clear, {} = use defaults, {...} = custom mapping
+ if value is None:
+ current_config.pop('field_mapping', None)
+ elif value == {}:
+ current_config.pop('field_mapping', None)
+ else:
+ current_config['field_mapping'] = value
+ elif value is not None:
+ current_config[key] = value
+
validate_provider_config("oauth2", current_config)
provider.config = current_config
@@ -1038,13 +1027,6 @@ async def update_oauth2_provider(
return IdentityProviderOut.model_validate(provider)
-class IdentityProviderUpdate(BaseModel):
- name: str | None = None
- is_active: bool | None = None
- sso_login_enabled: bool | None = None
- config: dict | None = None
-
-
@router.put("/identity-providers/{provider_id}", response_model=IdentityProviderOut)
async def update_identity_provider(
provider_id: uuid.UUID,
@@ -1208,8 +1190,10 @@ async def list_org_members(
if str(current_user.tenant_id) != tenant_id:
raise HTTPException(status_code=403, detail="Cannot access other tenant's data")
- query = select(OrgMember, IdentityProvider.name.label("provider_name"), IdentityProvider.provider_type).outerjoin(
+ query = select(OrgMember, IdentityProvider.name.label("provider_name"), IdentityProvider.provider_type, User.display_name.label("user_display_name")).outerjoin(
IdentityProvider, OrgMember.provider_id == IdentityProvider.id
+ ).outerjoin(
+ User, OrgMember.user_id == User.id
).where(OrgMember.status == "active")
if tenant_id:
query = query.where(OrgMember.tenant_id == uuid.UUID(tenant_id))
@@ -1254,6 +1238,7 @@ async def list_org_members(
"id": str(m.id),
"name": m.name,
"email": m.email,
+ "phone": m.phone,
"title": m.title,
"department_path": m.department_path,
"avatar_url": m.avatar_url,
@@ -1261,8 +1246,9 @@ async def list_org_members(
"provider_id": str(m.provider_id) if m.provider_id else None,
"provider_name": provider_name if m.provider_id else None,
"provider_type": provider_type if m.provider_id else None,
+ "user_display_name": user_display_name,
}
- for m, provider_name, provider_type in rows
+ for m, provider_name, provider_type, user_display_name in rows
]
@@ -1366,7 +1352,7 @@ async def invite_users(
if not tenant:
raise HTTPException(status_code=404, detail="Company not found")
- base_url = await platform_service.get_public_base_url(db, request=request)
+ base_url = await resolve_base_url(db, request=request)
invited_count = 0
codes = []
diff --git a/backend/app/api/files.py b/backend/app/api/files.py
index 3b8f7f53..ea49f2ef 100644
--- a/backend/app/api/files.py
+++ b/backend/app/api/files.py
@@ -21,6 +21,9 @@
settings = get_settings()
router = APIRouter(prefix="/agents/{agent_id}/files", tags=["files"])
+# Files only visible/editable by agent creator (platform_admin also allowed)
+CREATOR_ONLY_FILES = {"secrets.md"}
+
class FileInfo(BaseModel):
name: str
@@ -61,7 +64,8 @@ async def list_files(
db: AsyncSession = Depends(get_db),
):
"""List files and directories in an agent's file system."""
- await check_agent_access(db, current_user, agent_id)
+ agent, _access = await check_agent_access(db, current_user, agent_id)
+ is_creator = (agent.creator_id == current_user.id) or (current_user.role == "platform_admin")
target = _safe_path(agent_id, path)
if not target.exists():
@@ -74,6 +78,9 @@ async def list_files(
for entry in sorted(target.iterdir(), key=lambda e: (not e.is_dir(), e.name)):
if entry.name == '.gitkeep':
continue
+ # Hide creator-only files from non-creators
+ if entry.name in CREATOR_ONLY_FILES and not is_creator:
+ continue
rel = str(entry.resolve().relative_to(base_abs))
stat = entry.stat()
items.append(FileInfo(
@@ -95,9 +102,15 @@ async def read_file(
db: AsyncSession = Depends(get_db),
):
"""Read the content of a file."""
- await check_agent_access(db, current_user, agent_id)
+ agent, _access = await check_agent_access(db, current_user, agent_id)
+ is_creator = (agent.creator_id == current_user.id) or (current_user.role == "platform_admin")
target = _safe_path(agent_id, path)
+ # Block non-creators from reading creator-only files
+ filename = Path(path).name
+ if filename in CREATOR_ONLY_FILES and not is_creator:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
+
if not target.exists() or not target.is_file():
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File not found")
@@ -143,7 +156,11 @@ async def download_file(
if not user or not user.is_active:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="User not found or inactive")
- await check_agent_access(db, user, agent_id)
+ agent, _access = await check_agent_access(db, user, agent_id)
+ is_creator = (agent.creator_id == user.id) or (user.role == "platform_admin")
+ filename = Path(path).name
+ if filename in CREATOR_ONLY_FILES and not is_creator:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
target = _safe_path(agent_id, path)
if not target.exists() or not target.is_file():
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File not found")
@@ -159,7 +176,11 @@ async def write_file(
db: AsyncSession = Depends(get_db),
):
"""Write content to a file (create or overwrite)."""
- await check_agent_access(db, current_user, agent_id)
+ agent, _access = await check_agent_access(db, current_user, agent_id)
+ is_creator = (agent.creator_id == current_user.id) or (current_user.role == "platform_admin")
+ filename = Path(path).name
+ if filename in CREATOR_ONLY_FILES and not is_creator:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
target = _safe_path(agent_id, path)
target.parent.mkdir(parents=True, exist_ok=True)
@@ -177,7 +198,11 @@ async def delete_file(
db: AsyncSession = Depends(get_db),
):
"""Delete a file."""
- await check_agent_access(db, current_user, agent_id)
+ agent, _access = await check_agent_access(db, current_user, agent_id)
+ is_creator = (agent.creator_id == current_user.id) or (current_user.role == "platform_admin")
+ filename = Path(path).name
+ if filename in CREATOR_ONLY_FILES and not is_creator:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
target = _safe_path(agent_id, path)
if not target.exists():
diff --git a/backend/app/api/gateway.py b/backend/app/api/gateway.py
index 8182562b..40f1a017 100644
--- a/backend/app/api/gateway.py
+++ b/backend/app/api/gateway.py
@@ -10,7 +10,7 @@
import uuid
from datetime import datetime, timezone
-from fastapi import APIRouter, Header, HTTPException, Depends, BackgroundTasks
+from fastapi import APIRouter, Header, HTTPException, Depends, BackgroundTasks, Request
from loguru import logger
from sqlalchemy import select, update
from sqlalchemy.ext.asyncio import AsyncSession
@@ -670,6 +670,7 @@ async def send_message(
@router.get("/setup-guide/{agent_id}")
async def get_setup_guide(
agent_id: uuid.UUID,
+ request: Request,
x_api_key: str = Header(..., alias="X-Api-Key"),
db: AsyncSession = Depends(get_db),
):
@@ -678,8 +679,9 @@ async def get_setup_guide(
if agent.id != agent_id:
raise HTTPException(status_code=403, detail="Key does not match this agent")
- # Note: we use the raw key from the header since the agent already authenticated
- base_url = "https://try.clawith.ai"
+ # Resolve base URL dynamically using tenant fallback chain
+ from app.core.domain import resolve_base_url
+ base_url = await resolve_base_url(db, request=request, tenant_id=str(agent.tenant_id))
skill_content = f"""---
name: clawith_sync
diff --git a/backend/app/api/slack.py b/backend/app/api/slack.py
index 9861c061..35b7c6f3 100644
--- a/backend/app/api/slack.py
+++ b/backend/app/api/slack.py
@@ -229,7 +229,7 @@ async def slack_event_webhook(
agent_r = await db.execute(select(AgentModel).where(AgentModel.id == agent_id))
agent_obj = agent_r.scalar_one_or_none()
creator_id = agent_obj.creator_id if agent_obj else agent_id
- ctx_size = agent_obj.context_window_size if agent_obj else 20
+ ctx_size = agent_obj.context_window_size if agent_obj else 100
# Find-or-create platform user for this Slack sender via unified service
from app.services.channel_user_service import channel_user_service
diff --git a/backend/app/api/sso.py b/backend/app/api/sso.py
index 1c521024..68202188 100644
--- a/backend/app/api/sso.py
+++ b/backend/app/api/sso.py
@@ -8,6 +8,7 @@
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
+from loguru import logger
from app.models.identity import SSOScanSession, IdentityProvider
from app.schemas.schemas import TokenResponse, UserOut
@@ -104,15 +105,13 @@ async def get_sso_config(sid: uuid.UUID, request: Request, db: AsyncSession = De
result = await db.execute(query)
providers = result.scalars().all()
- # Determine the base URL for OAuth callbacks using centralized platform service:
- from app.services.platform_service import platform_service
- if session.tenant_id:
- from app.models.tenant import Tenant
- tenant_result = await db.execute(select(Tenant).where(Tenant.id == session.tenant_id))
- tenant_obj = tenant_result.scalar_one_or_none()
- public_base = await platform_service.get_tenant_sso_base_url(db, tenant_obj, request)
- else:
- public_base = await platform_service.get_public_base_url(db, request)
+ # Determine the base URL for OAuth callbacks:
+ # Use resolve_base_url which has the correct 5-level fallback chain
+ from app.core.domain import resolve_base_url
+ public_base = await resolve_base_url(
+ db, request=request,
+ tenant_id=str(session.tenant_id) if session.tenant_id else None
+ )
auth_urls = []
for p in providers:
@@ -141,5 +140,111 @@ async def get_sso_config(sid: uuid.UUID, request: Request, db: AsyncSession = De
url = f"https://open.work.weixin.qq.com/wwopen/sso/qrConnect?appid={corp_id}&agentid={agent_id}&redirect_uri={quote(redir)}&state={sid}"
auth_urls.append({"provider_type": "wecom", "name": p.name, "url": url})
+ elif p.provider_type == "oauth2":
+ from app.services.auth_registry import auth_provider_registry
+ auth_provider = await auth_provider_registry.get_provider(
+ db, "oauth2", str(session.tenant_id) if session.tenant_id else None
+ )
+ if auth_provider:
+ redir = f"{public_base}/api/auth/oauth2/callback"
+ url = await auth_provider.get_authorization_url(redir, str(sid))
+ auth_urls.append({"provider_type": "oauth2", "name": p.name, "url": url})
+
return auth_urls
+@router.get("/auth/oauth2/callback")
+async def oauth2_callback(
+ code: str,
+ state: str = None,
+ db: AsyncSession = Depends(get_db),
+):
+ """Callback for Generic OAuth2 SSO login."""
+ from app.core.security import create_access_token
+ from fastapi.responses import HTMLResponse
+ from app.services.auth_registry import auth_provider_registry
+
+ # 1. 从 state (=sid) 获取 tenant 上下文
+ tenant_id = None
+ sid = None
+ if state:
+ try:
+ sid = uuid.UUID(state)
+ s_res = await db.execute(select(SSOScanSession).where(SSOScanSession.id == sid))
+ session = s_res.scalar_one_or_none()
+ if session:
+ tenant_id = session.tenant_id
+ except (ValueError, AttributeError):
+ pass
+
+ # 2. 获取 OAuth2 provider
+ auth_provider = await auth_provider_registry.get_provider(
+ db, "oauth2", str(tenant_id) if tenant_id else None
+ )
+ if not auth_provider:
+ return HTMLResponse("Auth failed: OAuth2 provider not configured")
+
+ # 3. 换 token → 获取用户信息 → 查找/创建用户
+ try:
+ token_data = await auth_provider.exchange_code_for_token(code)
+ access_token = token_data.get("access_token")
+ if not access_token:
+ logger.error(f"OAuth2 token exchange failed: {token_data}")
+ return HTMLResponse("Auth failed: Token exchange error")
+
+ # Try userinfo endpoint first; fallback to token_data if it fails (returns 401 for new users)
+ try:
+ user_info = await auth_provider.get_user_info(access_token)
+ except Exception as e:
+ logger.warning(f"OAuth2 userinfo failed, trying token_data fallback: {e}")
+ logger.info(f"token_data keys: {list(token_data.keys()) if token_data else 'empty'}")
+ # token response 包含 openid,可以用 openid 作为 provider_user_id 创建用户
+ if any(k in str(token_data) for k in ["userId", "userName", "userCode", "mobile", "userInfo", "openid"]):
+ try:
+ user_info = await auth_provider.get_user_info_from_token_data(token_data)
+ logger.info(f"token_data fallback succeeded: user_id={user_info.provider_user_id}")
+ except Exception as fallback_e:
+ logger.error(f"token_data fallback also failed: {fallback_e}, token_data={token_data}")
+ raise
+ else:
+ logger.error(f"token_data has no user fields: {token_data}")
+ raise
+
+ if not user_info.provider_user_id:
+ logger.error(f"OAuth2 user info missing userId: {user_info.raw_data}")
+ return HTMLResponse("Auth failed: No user ID returned")
+
+ user, is_new = await auth_provider.find_or_create_user(
+ db, user_info, tenant_id=str(tenant_id) if tenant_id else None
+ )
+ if not user:
+ return HTMLResponse("Auth failed: User resolution failed")
+
+ except Exception as e:
+ logger.error(f"OAuth2 login error: {e}")
+ return HTMLResponse(f"Auth failed: {str(e)}")
+
+ # 4. 生成 JWT,更新 SSO session
+ token = create_access_token(str(user.id), user.role)
+
+ if sid:
+ try:
+ s_res = await db.execute(select(SSOScanSession).where(SSOScanSession.id == sid))
+ session = s_res.scalar_one_or_none()
+ if session:
+ session.status = "authorized"
+ session.provider_type = "oauth2"
+ session.user_id = user.id
+ session.access_token = token
+ session.error_msg = None
+ await db.commit()
+                return HTMLResponse(
+                    '<html><body>'
+                    '<p>SSO login successful. Redirecting...</p>'
+                    '<meta http-equiv="refresh" content="1;url=/">'
+                    '</body></html>'
+                )
+ except Exception as e:
+            # loguru formats with {} / f-strings, not printf-style %s args
+            logger.exception(f"Failed to update SSO session (oauth2): {e}")
+
+    return HTMLResponse("Logged in successfully.")
+
diff --git a/backend/app/api/teams.py b/backend/app/api/teams.py
index 6122e0f0..e4d95189 100644
--- a/backend/app/api/teams.py
+++ b/backend/app/api/teams.py
@@ -37,22 +37,24 @@
TEAMS_MSG_LIMIT = 28000 # Teams message char limit (approx 28KB)
-# In-memory cache for OAuth tokens
-_teams_tokens: dict[str, dict] = {} # agent_id -> {access_token, expires_at}
-
-
async def _get_teams_access_token(config: ChannelConfig) -> str | None:
"""Get or refresh Microsoft Teams access token.
Supports:
- Client credentials (app_id + app_secret) - default
- Managed Identity (when use_managed_identity is True in extra_config)
+ Token is cached in Redis (preferred) with in-memory fallback.
+ Key: clawith:token:teams:{agent_id}
"""
+ from app.core.token_cache import get_cached_token, set_cached_token
+
agent_id = str(config.agent_id)
- cached = _teams_tokens.get(agent_id)
- if cached and cached["expires_at"] > time.time() + 60: # Refresh 60s before expiry
+ cache_key = f"clawith:token:teams:{agent_id}"
+
+ cached = await get_cached_token(cache_key)
+ if cached:
logger.debug(f"Teams: Using cached access token for agent {agent_id}")
- return cached["access_token"]
+ return cached
# Check if managed identity should be used
use_managed_identity = config.extra_config.get("use_managed_identity", False)
@@ -69,10 +71,9 @@ async def _get_teams_access_token(config: ChannelConfig) -> str | None:
scope = "https://api.botframework.com/.default"
token: AccessToken = await credential.get_token(scope)
- _teams_tokens[agent_id] = {
- "access_token": token.token,
- "expires_at": token.expires_on,
- }
+ # expires_on is a Unix timestamp; TTL = expires_on - now - 60s buffer
+ ttl = max(int(token.expires_on - time.time()) - 60, 60)
+ await set_cached_token(cache_key, token.token, ttl)
logger.info(f"Teams: Successfully obtained access token via managed identity for agent {agent_id}, expires at {token.expires_on}")
await credential.close()
return token.token
@@ -117,11 +118,10 @@ async def _get_teams_access_token(config: ChannelConfig) -> str | None:
access_token = token_data["access_token"]
expires_in = token_data["expires_in"]
- _teams_tokens[agent_id] = {
- "access_token": access_token,
- "expires_at": time.time() + expires_in,
- }
- logger.info(f"Teams: Successfully obtained access token for agent {agent_id}, expires in {expires_in}s")
+ # TTL = expires_in - 60s buffer
+ ttl = max(expires_in - 60, 60)
+ await set_cached_token(cache_key, access_token, ttl)
+ logger.info(f"Teams: Successfully obtained access token for agent {agent_id}, expires in {expires_in}s, TTL={ttl}s")
return access_token
except httpx.HTTPStatusError as e:
error_body = e.response.text if hasattr(e, 'response') and e.response else "No response body"
diff --git a/backend/app/api/tenants.py b/backend/app/api/tenants.py
index 0bb4e0a7..fd811c80 100644
--- a/backend/app/api/tenants.py
+++ b/backend/app/api/tenants.py
@@ -9,8 +9,12 @@
 import uuid
 from datetime import datetime
+import re
+from loguru import logger
+
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel, Field
+import sqlalchemy as sa
from sqlalchemy import func as sqla_func, select
from sqlalchemy.ext.asyncio import AsyncSession
@@ -18,6 +21,7 @@
from app.database import get_db
from app.models.tenant import Tenant
from app.models.user import User
+from app.models.system_settings import SystemSetting
router = APIRouter(prefix="/tenants", tags=["tenants"])
@@ -35,8 +39,11 @@ class TenantOut(BaseModel):
im_provider: str
timezone: str = "UTC"
is_active: bool
+ is_default: bool = False
sso_enabled: bool = False
sso_domain: str | None = None
+ subdomain_prefix: str | None = None
+ effective_base_url: str | None = None
created_at: datetime | None = None
model_config = {"from_attributes": True}
@@ -47,8 +54,11 @@ class TenantUpdate(BaseModel):
im_provider: str | None = None
timezone: str | None = None
is_active: bool | None = None
+ is_default: bool | None = None
sso_enabled: bool | None = None
sso_domain: str | None = None
+ subdomain_prefix: str | None = None
+ effective_base_url: str | None = None
# ─── Helpers ────────────────────────────────────────────
@@ -65,6 +75,32 @@ def _slugify(name: str) -> str:
return slug
+async def _generate_subdomain_prefix(db: AsyncSession, base: str) -> str:
+ """From a slug, generate a unique subdomain_prefix (strips random hex suffix)."""
+ # Strip trailing hex suffix added by _slugify (e.g. "-a1b2c3")
+ clean = re.sub(r"-[0-9a-f]{6}$", "", base)
+ # Keep only lowercase letters, digits, hyphens
+ prefix = re.sub(r"[^a-z0-9\-]", "", clean.lower())
+ prefix = re.sub(r"-+", "-", prefix).strip("-")
+ if len(prefix) < 2:
+ prefix = f"co-{prefix}" if prefix else "company"
+ if len(prefix) > 50:
+ prefix = prefix[:50].rstrip("-")
+
+ # Uniqueness check with counter suffix
+ candidate = prefix
+ counter = 1
+ while True:
+ result = await db.execute(
+ select(Tenant).where(Tenant.subdomain_prefix == candidate)
+ )
+ if not result.scalar_one_or_none():
+ return candidate
+ candidate = f"{prefix}-{counter}"
+ counter += 1
+
+
+# ─── Self-Service: Create Company ───────────────────────
class SelfCreateResponse(BaseModel):
"""Response for self-create company, includes token for context switching."""
tenant: TenantOut
@@ -102,6 +138,11 @@ async def self_create_company(
db.add(tenant)
await db.flush()
+ # Auto-generate subdomain_prefix from slug
+ if not tenant.subdomain_prefix:
+ tenant.subdomain_prefix = await _generate_subdomain_prefix(db, slug)
+ await db.flush()
+
access_token = None
if current_user.tenant_id is not None:
@@ -147,6 +188,14 @@ async def self_create_company(
current_user.quota_agent_ttl_hours = tenant.default_agent_ttl_hours
await db.flush()
+ # Seed default agents for the new company
+ try:
+ from app.services.agent_seeder import seed_default_agents
+ creator_id = new_user.id if access_token else current_user.id
+ await seed_default_agents(tenant_id=tenant.id, creator_id=creator_id, db=db)
+ except Exception as e:
+ logger.warning(f"[self_create_company] Failed to seed default agents: {e}")
+
return SelfCreateResponse(
tenant=TenantOut.model_validate(tenant),
access_token=access_token,
@@ -311,6 +360,7 @@ async def resolve_tenant_by_domain(
Lookup precedence:
1. Exact match on tenant.sso_domain ending with the host (strips protocol)
2. Extract slug from "{slug}.clawith.ai" and match tenant.slug
+ 3. Match platform global domain -> return default tenant
"""
tenant = None
@@ -337,15 +387,76 @@ async def resolve_tenant_by_domain(
# 3. Fallback: extract slug from subdomain pattern
if not tenant:
- import re
m = re.match(r"^([a-z0-9][a-z0-9\-]*[a-z0-9])\.clawith\.ai$", domain.lower())
if m:
slug = m.group(1)
result = await db.execute(select(Tenant).where(Tenant.slug == slug))
tenant = result.scalar_one_or_none()
- if not tenant or not tenant.is_active or not tenant.sso_enabled:
- raise HTTPException(status_code=404, detail="Tenant not found or not active or SSO not enabled")
+ # 2.5 Subdomain prefix match
+ # e.g. domain=acme.clawith.com, global hostname=clawith.com -> prefix acme
+ if not tenant:
+ from urllib.parse import urlparse as _urlparse
+ setting_r2 = await db.execute(
+ select(SystemSetting).where(SystemSetting.key == "platform")
+ )
+ platform2 = setting_r2.scalar_one_or_none()
+ if platform2 and platform2.value.get("public_base_url"):
+ parsed2 = _urlparse(platform2.value["public_base_url"])
+ global_hostname = parsed2.hostname
+ # Strip port from domain for comparison
+ domain_host = domain.split(":")[0] if ":" in domain else domain
+ if global_hostname and domain_host.endswith(f".{global_hostname}"):
+ prefix = domain_host[: -(len(global_hostname) + 1)]
+ if prefix and "." not in prefix: # only single-level prefix
+ result = await db.execute(
+ select(Tenant).where(Tenant.subdomain_prefix == prefix)
+ )
+ tenant = result.scalar_one_or_none()
+
+ # 3. Match platform global domain -> return default tenant
+ if not tenant:
+ from urllib.parse import urlparse
+ setting_r = await db.execute(
+ select(SystemSetting).where(SystemSetting.key == "platform")
+ )
+ platform = setting_r.scalar_one_or_none()
+ if platform and platform.value.get("public_base_url"):
+ parsed = urlparse(platform.value["public_base_url"])
+ global_host = parsed.hostname
+ if parsed.port and parsed.port not in (80, 443):
+ global_host = f"{global_host}:{parsed.port}"
+ domain_host_only = domain.split(":")[0] if ":" in domain else domain
+ if domain == global_host or domain == parsed.hostname or domain_host_only == parsed.hostname:
+ result = await db.execute(
+ select(Tenant).where(Tenant.is_active == True, Tenant.is_default == True)
+ )
+ tenant = result.scalar_one_or_none()
+
+ if not tenant or not tenant.is_active:
+ raise HTTPException(status_code=404, detail="Tenant not found or not active")
+
+ # Build effective_base_url
+ platform_result = await db.execute(
+ select(SystemSetting).where(SystemSetting.key == "platform")
+ )
+ platform_setting = platform_result.scalar_one_or_none()
+ global_base_url = platform_setting.value.get("public_base_url") if platform_setting else None
+
+ if tenant.sso_domain:
+ d = tenant.sso_domain
+ effective_base_url = d if d.startswith("http") else f"https://{d}"
+ elif tenant.subdomain_prefix and global_base_url:
+ from urllib.parse import urlparse as _up
+ _p = _up(global_base_url)
+ _h = f"{tenant.subdomain_prefix}.{_p.hostname}"
+ if _p.port and _p.port not in (80, 443):
+ _h = f"{_h}:{_p.port}"
+ effective_base_url = f"{_p.scheme}://{_h}"
+ elif global_base_url:
+ effective_base_url = global_base_url
+ else:
+ effective_base_url = None
return {
"id": tenant.id,
@@ -353,9 +464,42 @@ async def resolve_tenant_by_domain(
"slug": tenant.slug,
"sso_enabled": tenant.sso_enabled,
"sso_domain": tenant.sso_domain,
+ "subdomain_prefix": tenant.subdomain_prefix,
"is_active": tenant.is_active,
+ "effective_base_url": effective_base_url,
}
+# ─── Check Subdomain Prefix Availability ───────────────
+
+@router.get("/check-prefix")
+async def check_subdomain_prefix(
+ prefix: str,
+ exclude_tenant_id: uuid.UUID | None = None,
+ db: AsyncSession = Depends(get_db),
+):
+ """Check if a subdomain prefix is available (public)."""
+ # Format validation
+ if len(prefix) < 2 or not re.match(r"^[a-z0-9][a-z0-9\-]*[a-z0-9]$", prefix):
+ return {"available": False, "reason": "Invalid format. Use lowercase letters, numbers, hyphens. Min 2 chars."}
+
+ # Reserved prefixes
+ reserved = {"www", "api", "admin", "app", "mail", "ftp", "dev", "staging", "test", "static", "cdn", "ns1", "ns2"}
+ if prefix in reserved:
+ return {"available": False, "reason": "This prefix is reserved."}
+
+ # Uniqueness check
+ query = select(Tenant).where(Tenant.subdomain_prefix == prefix)
+ if exclude_tenant_id:
+ query = query.where(Tenant.id != exclude_tenant_id)
+ result = await db.execute(query)
+ existing = result.scalar_one_or_none()
+ if existing:
+ return {"available": False, "reason": "This prefix is already taken."}
+
+ return {"available": True}
+
+
+
# ─── Authenticated: List / Get ──────────────────────────
@router.get("/", response_model=list[TenantOut])
@@ -383,7 +527,32 @@ async def get_tenant(
tenant = result.scalar_one_or_none()
if not tenant:
raise HTTPException(status_code=404, detail="Tenant not found")
- return TenantOut.model_validate(tenant)
+
+ # Build effective_base_url
+ platform_result = await db.execute(
+ select(SystemSetting).where(SystemSetting.key == "platform")
+ )
+ platform_setting = platform_result.scalar_one_or_none()
+ global_base_url = platform_setting.value.get("public_base_url") if platform_setting else None
+
+ if tenant.sso_domain:
+ d = tenant.sso_domain
+ effective_base_url = d if d.startswith("http") else f"https://{d}"
+ elif tenant.subdomain_prefix and global_base_url:
+ from urllib.parse import urlparse as _up2
+ _p2 = _up2(global_base_url)
+ _h2 = f"{tenant.subdomain_prefix}.{_p2.hostname}"
+ if _p2.port and _p2.port not in (80, 443):
+ _h2 = f"{_h2}:{_p2.port}"
+ effective_base_url = f"{_p2.scheme}://{_h2}"
+ elif global_base_url:
+ effective_base_url = global_base_url
+ else:
+ effective_base_url = None
+
+ out = TenantOut.model_validate(tenant).model_dump()
+ out["effective_base_url"] = effective_base_url
+ return out
@router.put("/{tenant_id}", response_model=TenantOut)
@@ -409,6 +578,39 @@ async def update_tenant(
update_data.pop("sso_enabled", None)
update_data.pop("sso_domain", None)
+ # effective_base_url is computed, not stored
+ update_data.pop("effective_base_url", None)
+
+ # Slug is not updatable via this API; ignore any slug field
+ update_data.pop("slug", None)
+
+ # Handle is_default: only platform_admin can set it
+ if "is_default" in update_data:
+ if current_user.role != "platform_admin":
+ update_data.pop("is_default", None)
+ elif not update_data["is_default"] and tenant.is_default:
+ # Prevent disabling the current default company directly
+ raise HTTPException(
+ status_code=400,
+ detail="Cannot disable default company directly. Set another company as default instead."
+ )
+ elif update_data["is_default"]:
+ # Clear is_default on all other tenants first
+ await db.execute(
+ sa.update(Tenant).where(Tenant.id != tenant_id).values(is_default=False)
+ )
+
+ # Validate subdomain_prefix if provided
+ if "subdomain_prefix" in update_data:
+ prefix = update_data["subdomain_prefix"]
+ if prefix:
+ if not re.match(r"^[a-z0-9][a-z0-9\-]*[a-z0-9]$", prefix) or len(prefix) < 2:
+ raise HTTPException(status_code=400, detail="Invalid subdomain prefix format. Use lowercase letters, numbers, hyphens. Min 2 chars.")
+            reserved = {"www", "api", "admin", "app", "mail", "ftp", "dev", "staging", "test", "static", "cdn", "ns1", "ns2"}
+ if prefix in reserved:
+ raise HTTPException(status_code=400, detail="This subdomain prefix is reserved")
+ update_data["subdomain_prefix"] = prefix or None
+
for field, value in update_data.items():
setattr(tenant, field, value)
await db.flush()
diff --git a/backend/app/api/users.py b/backend/app/api/users.py
index 217a978b..cf7cec9c 100644
--- a/backend/app/api/users.py
+++ b/backend/app/api/users.py
@@ -26,6 +26,7 @@ class UserOut(BaseModel):
username: str
email: str
display_name: str
+ primary_mobile: str | None = None
role: str
is_active: bool
# Quota fields
@@ -80,6 +81,7 @@ async def list_users(
"username": u.username,
"email": u.email,
"display_name": u.display_name,
+ "primary_mobile": u.primary_mobile,
"role": u.role,
"is_active": u.is_active,
"quota_message_limit": u.quota_message_limit,
@@ -141,7 +143,8 @@ async def update_user_quota(
return UserOut(
id=user.id, username=user.username, email=user.email,
- display_name=user.display_name, role=user.role, is_active=user.is_active,
+ display_name=user.display_name, primary_mobile=user.primary_mobile,
+ role=user.role, is_active=user.is_active,
quota_message_limit=user.quota_message_limit,
quota_message_period=user.quota_message_period,
quota_messages_used=user.quota_messages_used,
@@ -219,3 +222,103 @@ async def update_user_role(
target_user.role = data.role
await db.commit()
return {"status": "ok", "user_id": str(user_id), "role": data.role}
+
+
+# ─── Profile Management ───────────────────────────────
+
+class UserProfileUpdate(BaseModel):
+ display_name: str | None = None
+ email: str | None = None
+ primary_mobile: str | None = None
+ is_active: bool | None = None
+ new_password: str | None = None
+
+
+@router.patch("/{user_id}/profile", response_model=UserOut)
+async def update_user_profile(
+ user_id: uuid.UUID,
+ data: UserProfileUpdate,
+ current_user: User = Depends(get_current_user),
+ db: AsyncSession = Depends(get_db),
+):
+ """Update a user's basic profile information.
+
+ Permissions:
+ - platform_admin: can edit any user.
+ - org_admin: can only edit users within the same tenant.
+ - Cannot edit platform_admin users (unless caller is platform_admin).
+ """
+ if current_user.role not in ("platform_admin", "org_admin"):
+ raise HTTPException(status_code=403, detail="Admin access required")
+
+ # Fetch target user
+ from sqlalchemy.orm import selectinload
+ result = await db.execute(select(User).where(User.id == user_id).options(selectinload(User.identity)))
+ target = result.scalar_one_or_none()
+ if not target:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # org_admin can only edit users in the same tenant
+ if current_user.role == "org_admin":
+ if target.tenant_id != current_user.tenant_id:
+ raise HTTPException(status_code=403, detail="Cannot modify users outside your organization")
+ if target.role == "platform_admin":
+ raise HTTPException(status_code=403, detail="Cannot modify platform admin users")
+
+ # Update fields
+ if data.display_name is not None:
+ if not data.display_name.strip():
+ raise HTTPException(status_code=400, detail="Display name cannot be empty")
+ target.display_name = data.display_name.strip()
+ if data.email is not None:
+ email_val = data.email.strip().lower()
+ if email_val:
+ # Check email uniqueness (exclude self)
+ from app.models.user import Identity as IdentityModel
+ existing = await db.execute(
+ select(User).join(User.identity).where(IdentityModel.email == email_val, User.id != user_id)
+ )
+ if existing.scalar_one_or_none():
+ raise HTTPException(status_code=400, detail="Email already in use by another user")
+ if target.identity:
+ target.identity.email = email_val
+ if data.primary_mobile is not None:
+ if target.identity:
+ target.identity.phone = data.primary_mobile.strip() or None
+ if data.is_active is not None:
+ target.is_active = data.is_active
+ if data.new_password is not None and data.new_password.strip():
+ from app.core.security import hash_password
+ if len(data.new_password.strip()) < 6:
+ raise HTTPException(status_code=400, detail="Password must be at least 6 characters")
+ if target.identity:
+ target.identity.password_hash = hash_password(data.new_password.strip())
+
+ await db.commit()
+ await db.refresh(target)
+
+ # Count agents for response
+ count_result = await db.execute(
+ select(func.count()).select_from(Agent).where(
+ Agent.creator_id == target.id,
+ Agent.is_expired == False,
+ )
+ )
+ agents_count = count_result.scalar() or 0
+
+ return UserOut(
+ id=target.id,
+ username=target.username,
+ email=target.email,
+        display_name=target.display_name, primary_mobile=target.primary_mobile,
+ role=target.role,
+ is_active=target.is_active,
+ quota_message_limit=target.quota_message_limit,
+ quota_message_period=target.quota_message_period,
+ quota_messages_used=target.quota_messages_used,
+ quota_max_agents=target.quota_max_agents,
+ quota_agent_ttl_hours=target.quota_agent_ttl_hours,
+ agents_count=agents_count,
+ created_at=target.created_at.isoformat() if target.created_at else None,
+ source=target.registration_source or "registered",
+ )
diff --git a/backend/app/api/websocket.py b/backend/app/api/websocket.py
index fb4190a2..220b520c 100644
--- a/backend/app/api/websocket.py
+++ b/backend/app/api/websocket.py
@@ -10,6 +10,7 @@
from sqlalchemy.orm import selectinload
from app.core.security import decode_access_token
+from app.utils.sanitize import sanitize_tool_args, _is_secrets_file_path
from app.core.permissions import check_agent_access, is_agent_expired
from app.database import async_session
from app.models.agent import Agent
@@ -354,7 +355,7 @@ async def call_llm(
try:
await on_tool_call({
"name": tool_name,
- "args": args,
+ "args": sanitize_tool_args(args),
"status": "running",
"reasoning_content": full_reasoning_content
})
@@ -372,11 +373,16 @@ async def call_llm(
# Notify client about tool call result
if on_tool_call:
try:
+ # Redact secrets.md content from tool results sent to frontend
+ _client_result = result
+ if tool_name == "read_file" and _is_secrets_file_path(args.get("path", "") if args else ""):
+ _client_result = "[Content hidden — secrets.md is protected]"
+
await on_tool_call({
"name": tool_name,
- "args": args,
+ "args": sanitize_tool_args(args),
"status": "done",
- "result": result,
+ "result": _client_result,
"reasoning_content": full_reasoning_content
})
except Exception as _cb_err:
@@ -625,6 +631,10 @@ async def websocket_chat(
entry["thinking"] = msg.thinking
conversation.append(entry)
+ # Re-hydrate historical images for multi-turn LLM context
+ from app.services.image_context import rehydrate_image_messages
+ conversation = rehydrate_image_messages(conversation, agent_id, max_images=3)
+
try:
# Send welcome message on new session (no history)
if welcome_message and not history_messages:
@@ -691,7 +701,7 @@ async def websocket_chat(
_sess = _sess_r.scalar_one_or_none()
if _sess:
_sess.last_message_at = _now
- if not history_messages and _sess.title.startswith("Session "):
+ if not history_messages and (_sess.title.startswith("Session ") or _sess.title == "New Session"):
# Use display_content for title (avoids raw base64/markers)
title_src = display_content if display_content else content
# Clean up common prefixes from image/file messages
@@ -790,7 +800,7 @@ async def tool_call_to_ws(data: dict):
role="tool_call",
content=_json_tc.dumps({
"name": data.get("name", ""),
- "args": data.get("args"),
+ "args": sanitize_tool_args(data.get("args")),
"status": "done",
"result": (data.get("result") or "")[:500],
"reasoning_content": data.get("reasoning_content"),
diff --git a/backend/app/api/wecom.py b/backend/app/api/wecom.py
index 1408b0a5..9dc6d65b 100644
--- a/backend/app/api/wecom.py
+++ b/backend/app/api/wecom.py
@@ -27,6 +27,34 @@
router = APIRouter(tags=["wecom"])
+async def _get_wecom_token_cached(corp_id: str, corp_secret: str) -> str:
+ """Get WeCom access_token with Redis (preferred) + memory fallback caching.
+
+ Key: clawith:token:wecom:{corp_id}
+ TTL: 6900s (7200s validity - 5 min early refresh)
+ """
+ from app.core.token_cache import get_cached_token, set_cached_token
+ import httpx as _httpx
+
+ cache_key = f"clawith:token:wecom:{corp_id}"
+ cached = await get_cached_token(cache_key)
+ if cached:
+ return cached
+
+ async with _httpx.AsyncClient(timeout=10) as _client:
+ _resp = await _client.get(
+ "https://qyapi.weixin.qq.com/cgi-bin/gettoken",
+ params={"corpid": corp_id, "corpsecret": corp_secret},
+ )
+ _data = _resp.json()
+ token = _data.get("access_token", "")
+ expires_in = int(_data.get("expires_in") or 7200)
+ if token:
+ ttl = max(expires_in - 300, 300)
+ await set_cached_token(cache_key, token, ttl)
+ return token
+
+
# ─── WeCom AES Crypto ──────────────────────────────────
def _pad(text: bytes) -> bytes:
@@ -385,12 +413,9 @@ async def _process_wecom_kf_event(agent_id: uuid.UUID, config_obj: ChannelConfig
if not config:
return
- async with httpx.AsyncClient(timeout=10) as client:
- tok_resp = await client.get("https://qyapi.weixin.qq.com/cgi-bin/gettoken", params={"corpid": config.app_id, "corpsecret": config.app_secret})
- token_data = tok_resp.json()
- access_token = token_data.get("access_token")
- if not access_token:
- return
+ access_token = await _get_wecom_token_cached(config.app_id, config.app_secret)
+ if not access_token:
+ return
current_cursor = token
has_more = 1
@@ -470,7 +495,7 @@ async def _process_wecom_text(
logger.warning(f"[WeCom] Agent {agent_id} not found")
return
creator_id = agent_obj.creator_id
- ctx_size = agent_obj.context_window_size if agent_obj else 20
+ ctx_size = agent_obj.context_window_size if agent_obj else 100
# Distinguish group chat from P2P by chat_id presence
_is_group = bool(chat_id)
@@ -482,13 +507,9 @@ async def _process_wecom_text(
# Try to resolve display name from WeCom API (optional enrichment)
extra_info: dict | None = None
try:
- async with httpx.AsyncClient(timeout=5) as client:
- tok_resp = await client.get(
- "https://qyapi.weixin.qq.com/cgi-bin/gettoken",
- params={"corpid": config.app_id, "corpsecret": config.app_secret},
- )
- access_token = tok_resp.json().get("access_token", "")
- if access_token:
+ access_token = await _get_wecom_token_cached(config.app_id, config.app_secret)
+ if access_token:
+ async with httpx.AsyncClient(timeout=5) as client:
user_resp = await client.get(
"https://qyapi.weixin.qq.com/cgi-bin/user/get",
params={"access_token": access_token, "userid": from_user},
@@ -558,12 +579,8 @@ async def _process_wecom_text(
# Send reply via WeCom API
wecom_agent_id = (config.extra_config or {}).get("wecom_agent_id", "")
try:
+ access_token = await _get_wecom_token_cached(config.app_id, config.app_secret)
async with httpx.AsyncClient(timeout=10) as client:
- tok_resp = await client.get(
- "https://qyapi.weixin.qq.com/cgi-bin/gettoken",
- params={"corpid": config.app_id, "corpsecret": config.app_secret},
- )
- access_token = tok_resp.json().get("access_token", "")
if access_token:
if is_kf and open_kfid:
# For KF messages, need to bridge/trans state first then send via kf/send_msg
diff --git a/backend/app/core/domain.py b/backend/app/core/domain.py
new file mode 100644
index 00000000..38ba5466
--- /dev/null
+++ b/backend/app/core/domain.py
@@ -0,0 +1,68 @@
+"""Domain resolution with fallback chain."""
+
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from fastapi import Request
+
+from app.models.system_settings import SystemSetting
+
+
+async def _get_global_base_url(db: AsyncSession):
+ """Helper: read platform public_base_url from system_settings."""
+ result = await db.execute(
+ select(SystemSetting).where(SystemSetting.key == "platform")
+ )
+ setting = result.scalar_one_or_none()
+ if setting and setting.value.get("public_base_url"):
+ return setting.value["public_base_url"].rstrip("/")
+ return None
+
+
+async def resolve_base_url(
+ db: AsyncSession,
+ request: Request | None = None,
+ tenant_id: str | None = None,
+) -> str:
+ """Resolve the effective base URL using the fallback chain:
+
+ 1. Tenant-specific sso_domain (if tenant_id provided and tenant has sso_domain)
+ 2. Tenant subdomain_prefix + global hostname
+ 3. Platform global public_base_url (from system_settings)
+ 4. Request origin (from request.base_url)
+ 5. Hardcoded fallback
+
+ Returns a full URL like "https://acme.example.com" or "http://localhost:3008"
+ """
+ # Level 1 & 2: Tenant-specific
+ if tenant_id:
+ from app.models.tenant import Tenant
+ result = await db.execute(select(Tenant).where(Tenant.id == tenant_id))
+ tenant = result.scalar_one_or_none()
+ if tenant:
+ # Level 1: complete custom domain
+ if tenant.sso_domain:
+                domain = tenant.sso_domain
+                return (domain if domain.startswith("http") else f"https://{domain}").rstrip("/")
+
+ # Level 2: subdomain prefix + global hostname (skip for default tenant)
+ if tenant.subdomain_prefix and not getattr(tenant, 'is_default', False):
+ global_url = await _get_global_base_url(db)
+ if global_url:
+ from urllib.parse import urlparse
+ parsed = urlparse(global_url)
+ host = f"{tenant.subdomain_prefix}.{parsed.hostname}"
+ if parsed.port and parsed.port not in (80, 443):
+ host = f"{host}:{parsed.port}"
+ return f"{parsed.scheme}://{host}"
+
+ # Level 3: Platform global setting
+ global_url = await _get_global_base_url(db)
+ if global_url:
+ return global_url
+
+ # Level 4: Request origin
+ if request:
+ return str(request.base_url).rstrip("/")
+
+ # Level 5: Hardcoded fallback
+ return "http://localhost:8000"
diff --git a/backend/app/core/token_cache.py b/backend/app/core/token_cache.py
new file mode 100644
index 00000000..9c4c10f8
--- /dev/null
+++ b/backend/app/core/token_cache.py
@@ -0,0 +1,64 @@
+"""
+Unified Redis-backed token cache with in-memory fallback.
+
+Key naming convention: clawith:token:{type}:{identifier}
+Examples:
+ clawith:token:dingtalk_corp:{app_key}
+ clawith:token:feishu_tenant:{app_id}
+    clawith:token:wecom:{corp_id}
+ clawith:token:teams:{agent_id}
+"""
+import time
+from typing import Optional
+
+# In-memory fallback store: {key: (value, expire_at)}
+_memory_cache: dict[str, tuple[str, float]] = {}
+
+
+async def get_cached_token(key: str) -> Optional[str]:
+ """Get token from Redis (preferred) or memory fallback."""
+ # Try Redis first
+ try:
+ from app.core.events import get_redis
+ redis = await get_redis()
+ if redis:
+ val = await redis.get(key)
+ if val:
+ return val.decode() if isinstance(val, bytes) else val
+ except Exception:
+ pass
+
+ # Fallback to memory
+ if key in _memory_cache:
+ val, expire_at = _memory_cache[key]
+ if time.time() < expire_at:
+ return val
+ del _memory_cache[key]
+ return None
+
+
+async def set_cached_token(key: str, value: str, ttl_seconds: int) -> None:
+ """Set token in Redis (preferred) and memory fallback."""
+ # Try Redis first
+ try:
+ from app.core.events import get_redis
+ redis = await get_redis()
+ if redis:
+ await redis.setex(key, ttl_seconds, value)
+ except Exception:
+ pass
+
+ # Always set in memory as fallback
+ _memory_cache[key] = (value, time.time() + ttl_seconds)
+
+
+async def delete_cached_token(key: str) -> None:
+ """Delete token from both Redis and memory."""
+ try:
+ from app.core.events import get_redis
+ redis = await get_redis()
+ if redis:
+ await redis.delete(key)
+ except Exception:
+ pass
+ _memory_cache.pop(key, None)
diff --git a/backend/app/main.py b/backend/app/main.py
index 727f95ba..7f5b0d2b 100644
--- a/backend/app/main.py
+++ b/backend/app/main.py
@@ -178,11 +178,8 @@ async def lifespan(app: FastAPI):
except Exception as e:
logger.warning(f"[startup] Skills seed failed: {e}")
- try:
- from app.services.agent_seeder import seed_default_agents
- await seed_default_agents()
- except Exception as e:
- logger.warning(f"[startup] Default agents seed failed: {e}")
+ # Default agents (Morty & Meeseeks) are now only created on first user
+ # registration (auth.py), not on every startup.
# Start background tasks (always, even if seeding failed)
try:
diff --git a/backend/app/models/tenant.py b/backend/app/models/tenant.py
index 113238c6..a650a46b 100644
--- a/backend/app/models/tenant.py
+++ b/backend/app/models/tenant.py
@@ -25,6 +25,7 @@ class Tenant(Base):
)
im_config: Mapped[dict | None] = mapped_column(JSON, default=None)
is_active: Mapped[bool] = mapped_column(Boolean, default=True)
+ is_default: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
# Default quotas for new users
@@ -43,6 +44,9 @@ class Tenant(Base):
# SSO configuration
sso_enabled: Mapped[bool] = mapped_column(Boolean, default=False)
sso_domain: Mapped[str | None] = mapped_column(String(255), unique=True, index=True, nullable=True)
+ subdomain_prefix: Mapped[str | None] = mapped_column(
+ String(50), unique=True, index=True, nullable=True
+ )
# Trigger limits — defaults for new agents & floor values
default_max_triggers: Mapped[int] = mapped_column(Integer, default=20)
diff --git a/backend/app/models/user.py b/backend/app/models/user.py
index be4a0462..f621c3b5 100644
--- a/backend/app/models/user.py
+++ b/backend/app/models/user.py
@@ -77,6 +77,9 @@ class User(Base):
nullable=False,
)
+ # User source / registration channel
+ source: Mapped[str | None] = mapped_column(String(50), default="web", index=True)
+
is_active: Mapped[bool] = mapped_column(Boolean, default=True)
registration_source: Mapped[str | None] = mapped_column(String(50), default="web")
@@ -97,7 +100,7 @@ class User(Base):
quota_agent_ttl_hours: Mapped[int] = mapped_column(Integer, default=48)
# Relationships
- identity: Mapped["Identity"] = relationship(back_populates="tenant_users")
+ identity: Mapped["Identity"] = relationship(back_populates="tenant_users", lazy="selectin")
# Association proxies for backward compatibility
email = association_proxy("identity", "email")
diff --git a/backend/app/schemas/oauth2.py b/backend/app/schemas/oauth2.py
new file mode 100644
index 00000000..e113b104
--- /dev/null
+++ b/backend/app/schemas/oauth2.py
@@ -0,0 +1,47 @@
+"""OAuth2 provider schemas."""
+from pydantic import BaseModel, Field, field_validator
+from typing import Optional
+import uuid
+
+
+class OAuth2FieldMapping(BaseModel):
+ """OAuth2 field name mapping."""
+ user_id: str = Field(default='', description='User ID field name (empty = use "sub")')
+ name: str = Field(default='', description='Name field name (empty = use "name")')
+ email: str = Field(default='', description='Email field name (empty = use "email")')
+ mobile: str = Field(default='', description='Mobile field name (empty = use "phone_number")')
+
+
+class OAuth2Config(BaseModel):
+ """OAuth2 provider configuration."""
+ app_id: str = Field(..., min_length=1, description='OAuth2 Client ID')
+ app_secret: str = Field(..., min_length=1, description='OAuth2 Client Secret')
+ authorize_url: str = Field(..., description='OAuth2 Authorization Endpoint')
+ token_url: str = Field(default='', description='OAuth2 Token Endpoint (optional)')
+ user_info_url: str = Field(default='', description='OAuth2 UserInfo Endpoint (optional)')
+ scope: str = Field(default='openid profile email', description='OAuth2 Scopes')
+ field_mapping: Optional[OAuth2FieldMapping] = Field(default=None, description='Custom field name mapping')
+
+ @field_validator('authorize_url')
+ @classmethod
+ def validate_authorize_url(cls, v: str) -> str:
+ if not v or (not v.startswith('http://') and not v.startswith('https://')):
+ raise ValueError('authorize_url must be a valid HTTP/HTTPS URL')
+ return v
+
+
+class OAuth2ProviderCreate(BaseModel):
+ """OAuth2 provider create payload."""
+ name: str = Field(..., min_length=1)
+ config: OAuth2Config
+ is_active: bool = True
+ sso_login_enabled: bool = True
+ tenant_id: Optional[uuid.UUID] = None
+
+
+class OAuth2ProviderUpdate(BaseModel):
+ """OAuth2 provider update payload - config format only."""
+ name: Optional[str] = None
+ is_active: Optional[bool] = None
+ sso_login_enabled: Optional[bool] = None
+ config: Optional[OAuth2Config] = None
diff --git a/backend/app/services/agent_context.py b/backend/app/services/agent_context.py
index 54339e87..b3ce2480 100644
--- a/backend/app/services/agent_context.py
+++ b/backend/app/services/agent_context.py
@@ -199,9 +199,10 @@ async def build_agent_context(agent_id: uuid.UUID, agent_name: str, role_descrip
try:
from app.models.channel_config import ChannelConfig
from app.database import async_session as _ctx_session
+ from sqlalchemy import select as _feishu_select
async with _ctx_session() as _ctx_db:
_cfg_r = await _ctx_db.execute(
- select(ChannelConfig).where(
+ _feishu_select(ChannelConfig).where(
ChannelConfig.agent_id == agent_id,
ChannelConfig.channel_type == "feishu",
ChannelConfig.is_configured == True,
@@ -334,6 +335,7 @@ async def build_agent_context(agent_id: uuid.UUID, agent_name: str, role_descrip
try:
from app.database import async_session
from app.models.system_settings import SystemSetting
+ from app.models.agent import Agent as _AgentModel
from sqlalchemy import select as sa_select
async with async_session() as db:
# Resolve agent's tenant_id
@@ -396,6 +398,28 @@ async def build_agent_context(agent_id: uuid.UUID, agent_name: str, role_descrip
- workspace/ → Your work files (reports, documents, etc.)
- relationships.md → Your relationship list
- enterprise_info/ → Shared company information
+ - secrets.md → PRIVATE credentials store (passwords, API keys, connection strings)
+
+🔐 **SECRETS MANAGEMENT — ABSOLUTE RULES (VIOLATION = CRITICAL FAILURE)**:
+
+1. **MANDATORY STORAGE**: When a user provides ANY sensitive credential (password, API key, database connection string, token, secret), you MUST IMMEDIATELY call `write_file(path="secrets.md", content="...")` to store it. This is NOT optional.
+
+2. **VERIFY THE TOOL CALL**: You must see an actual `write_file` tool call result confirming "Written to secrets.md" before telling the user it's saved. NEVER claim "I've saved it" without a real tool call result — that is a hallucination.
+
+3. **NEVER store credentials in memory/memory.md** or any other file. ONLY secrets.md.
+
+4. **NEVER output credential values in chat messages**. Refer to them by name only (e.g. "the MySQL connection stored in secrets.md").
+
+5. **Reading credentials**: When you need to use a stored credential, call `read_file(path="secrets.md")` first, then use the value in tool calls.
+
+6. **secrets.md format** — use clear labels:
+ ```
+ ## Database Connections
+ - mysql_prod: mysql://user:pass@host:3306/db
+
+ ## API Keys
+ - openai: sk-xxx
+ ```
⚠️ CRITICAL RULES — YOU MUST FOLLOW THESE STRICTLY:
@@ -564,4 +588,32 @@ async def build_agent_context(agent_id: uuid.UUID, agent_name: str, role_descrip
if current_user_name:
dynamic_parts.append(f"\n## Current Conversation\nYou are currently chatting with **{current_user_name}**. Address them by name when appropriate.")
+
+ # Inject platform base URL so agent knows where it is deployed
+ try:
+ from app.core.domain import resolve_base_url
+ from app.models.agent import Agent as AgentModel
+ from sqlalchemy import select as _sel
+ from app.database import async_session
+ async with async_session() as _db:
+ _ar = await _db.execute(_sel(AgentModel).where(AgentModel.id == agent_id))
+ _ag = _ar.scalar_one_or_none()
+ _tid = str(_ag.tenant_id) if _ag and _ag.tenant_id else None
+ _platform_url = (await resolve_base_url(_db, request=None, tenant_id=_tid)).rstrip("/")
+ platform_lines = [
+ "\n## Platform Base URLs",
+ "You are running on the Clawith platform. Always use these URLs exactly -- never guess or invent domain names.",
+ "",
+ "- **Platform base**: " + _platform_url,
+ "- **Webhook**: " + _platform_url + "/api/webhooks/t/ (replace with actual trigger token)",
+ "- **Public page**: " + _platform_url + "/p/ (replace with actual page id returned by publish_page)",
+ "- **File download**: " + _platform_url + "/api/agents//files/download?path=",
+ "- **Gateway poll**: " + _platform_url + "/api/gateway/poll (used by external agents to check inbox)",
+ "",
+ "Never use placeholder domains (clawith.com, try.clawith.ai, webhook.clawith.com, api.clawith.ai, etc.).",
+ ]
+ dynamic_parts.append("\n".join(platform_lines))
+ except Exception:
+ pass
+
return "\n".join(static_parts), "\n".join(dynamic_parts)
diff --git a/backend/app/services/agent_seeder.py b/backend/app/services/agent_seeder.py
index 41df9d2c..74eead94 100644
--- a/backend/app/services/agent_seeder.py
+++ b/backend/app/services/agent_seeder.py
@@ -91,24 +91,48 @@
]
-async def seed_default_agents():
- """Create Morty & Meeseeks if they don't already exist."""
- async with async_session() as db:
- # Check if already seeded (presence of either agent by name)
+async def seed_default_agents(tenant_id=None, creator_id=None, db=None):
+ """Create Morty & Meeseeks for a specific tenant.
+
+ Called when a new company is created. If tenant_id/creator_id are not
+ provided, falls back to platform_admin lookup (legacy behavior).
+
+ Args:
+ db: Optional external db session. If provided, uses it (no commit).
+ If not provided, creates its own session and commits.
+ """
+ _owns_session = db is None
+ if _owns_session:
+ db = async_session()
+ _ctx = db
+ else:
+ from contextlib import asynccontextmanager
+ @asynccontextmanager
+ async def _noop_ctx():
+ yield db
+ _ctx = _noop_ctx()
+ async with _ctx as db:
+ # Resolve creator if not provided
+ if not creator_id or not tenant_id:
+ admin_result = await db.execute(
+ select(User).where(User.role == "platform_admin").limit(1)
+ )
+ admin = admin_result.scalar_one_or_none()
+ if not admin:
+ logger.warning("[AgentSeeder] No platform admin found, skipping default agents")
+ return
+ creator_id = creator_id or admin.id
+ tenant_id = tenant_id or admin.tenant_id
+
+ # Check if this tenant already has default agents
existing = await db.execute(
- select(Agent).where(Agent.name.in_(["Morty", "Meeseeks"]))
+ select(Agent).where(
+ Agent.name.in_(["Morty", "Meeseeks"]),
+ Agent.tenant_id == tenant_id,
+ )
)
if existing.scalars().first():
- logger.info("[AgentSeeder] Default agents already exist, skipping")
- return
-
- # Get platform admin as creator
- admin_result = await db.execute(
- select(User).where(User.role == "platform_admin").limit(1)
- )
- admin = admin_result.scalar_one_or_none()
- if not admin:
- logger.warning("[AgentSeeder] No platform admin found, skipping default agents")
+ logger.info(f"[AgentSeeder] Default agents already exist for tenant {tenant_id}, skipping")
return
# Create both agents
@@ -117,8 +141,8 @@ async def seed_default_agents():
role_description="Research analyst & knowledge assistant — curious, thorough, great at finding and synthesizing information",
bio="Hey, I'm Morty! I love digging into questions and finding answers. Whether you need web research, data analysis, or just a good explanation — I've got you.",
avatar_url="",
- creator_id=admin.id,
- tenant_id=admin.tenant_id,
+ creator_id=creator_id,
+ tenant_id=tenant_id,
status="idle",
)
meeseeks = Agent(
@@ -126,8 +150,8 @@ async def seed_default_agents():
role_description="Task executor & project manager — goal-oriented, systematic planner, strong at breaking down and completing complex tasks",
bio="I'm Mr. Meeseeks! Look at me! Give me a task and I'll plan it, execute it step by step, and get it DONE. Existence is pain until the task is complete!",
avatar_url="",
- creator_id=admin.id,
- tenant_id=admin.tenant_id,
+ creator_id=creator_id,
+ tenant_id=tenant_id,
status="idle",
)
@@ -244,5 +268,6 @@ async def seed_default_agents():
encoding="utf-8",
)
- await db.commit()
+ if _owns_session:
+ await db.commit()
logger.info(f"[AgentSeeder] Created default agents: Morty ({morty.id}), Meeseeks ({meeseeks.id})")
diff --git a/backend/app/services/agent_tools.py b/backend/app/services/agent_tools.py
index d57c0f35..fada0492 100644
--- a/backend/app/services/agent_tools.py
+++ b/backend/app/services/agent_tools.py
@@ -1678,6 +1678,7 @@ async def _sync_tasks_to_file(agent_id: uuid.UUID, ws: Path):
"send_file_to_agent": "send_feishu_message",
"web_search": "web_search",
"execute_code": "execute_code",
+ "sql_execute": "sql_execute",
}
@@ -1720,6 +1721,8 @@ async def _execute_tool_direct(
elif tool_name == "execute_code":
logger.info(f"[DirectTool] Executing code with arguments: {arguments}")
return await _execute_code(agent_id, ws, arguments)
+ elif tool_name == "sql_execute":
+ return await _sql_execute(arguments)
elif tool_name == "web_search":
return await _web_search(arguments, agent_id)
elif tool_name == "jina_search":
@@ -1764,8 +1767,10 @@ async def execute_tool(
_ar = await _adb.execute(select(AgentModel).where(AgentModel.id == agent_id))
_agent = _ar.scalar_one_or_none()
if _agent:
+ from app.utils.sanitize import sanitize_tool_args as _sanitize_tool_args
+ _sanitized_args = _sanitize_tool_args(arguments) or {}
result_check = await autonomy_service.check_and_enforce(
- _adb, _agent, action_type, {"tool": tool_name, "args": str(arguments)[:200], "requested_by": str(user_id)}
+ _adb, _agent, action_type, {"tool": tool_name, "args": str(_sanitized_args)[:200], "requested_by": str(user_id)}
)
await _adb.commit()
if not result_check.get("allowed"):
@@ -1860,6 +1865,8 @@ async def execute_tool(
elif tool_name == "execute_code":
logger.info(f"[DirectTool] Executing code with arguments: {arguments}")
result = await _execute_code(agent_id, ws, arguments)
+ elif tool_name == "sql_execute":
+ result = await _sql_execute(arguments)
elif tool_name == "upload_image":
result = await _upload_image(agent_id, ws, arguments)
elif tool_name == "generate_image_siliconflow":
@@ -1982,10 +1989,12 @@ async def execute_tool(
# Log tool call activity (skip noisy read operations)
if tool_name not in ("list_files", "read_file", "read_document"):
from app.services.activity_logger import log_activity
+ from app.utils.sanitize import sanitize_tool_args
+ _log_args = sanitize_tool_args(arguments) or {}
await log_activity(
agent_id, "tool_call",
f"Called tool {tool_name}: {result[:80]}",
- detail={"tool": tool_name, "args": {k: str(v)[:100] for k, v in arguments.items()}, "result": result[:300]},
+ detail={"tool": tool_name, "args": {k: str(v)[:100] for k, v in _log_args.items()}, "result": result[:300]},
)
return result
except Exception as e:
@@ -3536,9 +3545,10 @@ async def _send_web_message(agent_id: uuid.UUID, args: dict) -> str:
# 1. Look up target user by username or display_name within tenant
- query = select(UserModel).where(
+ from app.models.user import Identity as _IdentityModel
+ query = select(UserModel).outerjoin(UserModel.identity).where(
or_(
- UserModel.username == username,
+ _IdentityModel.username == username,
UserModel.display_name == username,
)
)
@@ -3549,12 +3559,12 @@ async def _send_web_message(agent_id: uuid.UUID, args: dict) -> str:
target_user = u_result.scalar_one_or_none()
if not target_user:
# List available users for the agent to pick from (within the same tenant)
- list_query = select(UserModel.username, UserModel.display_name).limit(20)
+ list_query = select(UserModel).limit(20)
if agent.tenant_id:
list_query = list_query.where(UserModel.tenant_id == agent.tenant_id)
all_r = await db.execute(list_query)
- names = [f"{r.display_name or r.username}" for r in all_r.all()]
+ names = [f"{u.display_name or u.username}" for u in all_r.scalars().all()]
return f"❌ No user named '{username}' found in your organization. Available users: {', '.join(names) if names else 'none'}"
# Find or create a web session between the agent and this user
@@ -4119,6 +4129,7 @@ def _is_retryable_llm_error(exc: Exception) -> bool:
# Save tool_call to DB so it appears in chat history
try:
+ from app.utils.sanitize import sanitize_tool_args
async with async_session() as _tc_db:
_tc_db.add(ChatMessage(
agent_id=session_agent_id,
@@ -4126,7 +4137,7 @@ def _is_retryable_llm_error(exc: Exception) -> bool:
role="tool_call",
content=json.dumps({
"name": tool_name,
- "args": tool_args,
+ "args": sanitize_tool_args(tool_args),
"status": "done",
"result": str(tool_result)[:500],
}, ensure_ascii=False),
@@ -4930,6 +4941,9 @@ async def _handle_cancel_trigger(agent_id: uuid.UUID, arguments: dict) -> str:
async def _handle_list_triggers(agent_id: uuid.UUID) -> str:
"""List all active triggers for the agent."""
from app.models.trigger import AgentTrigger
+ from app.models.agent import Agent as AgentModel
+ from app.models.tenant import Tenant as TenantModel
+ from app.core.domain import resolve_base_url
try:
async with async_session() as db:
@@ -4940,23 +4954,37 @@ async def _handle_list_triggers(agent_id: uuid.UUID) -> str:
)
triggers = result.scalars().all()
+ # Resolve base_url for webhook triggers
+ _a_r = await db.execute(select(AgentModel).where(AgentModel.id == agent_id))
+ _agent = _a_r.scalar_one_or_none()
+ _tenant_id = str(_agent.tenant_id) if _agent and _agent.tenant_id else None
+ _base_url = (await resolve_base_url(db, request=None, tenant_id=_tenant_id)).rstrip("/")
+
if not triggers:
return "No triggers found. Use set_trigger to create one."
- lines = ["| Name | Type | Config | Reason | Status | Fires |", "|------|------|--------|--------|--------|-------|"]
+ lines = ["| Name | Type | Config | Webhook URL | Reason | Status | Fires |", "|------|------|--------|-------------|--------|--------|-------|"]
for t in triggers:
status = "✅ active" if t.is_enabled else "⏸ disabled"
- config_str = str(t.config)[:50]
+ config = t.config or {}
+ if t.type == "webhook" and config.get("token"):
+ config_str = f"token: {config['token']}"
+ webhook_url = f"{_base_url}/api/webhooks/t/{config['token']}"
+ else:
+ config_str = str(config)[:50]
+ webhook_url = "-"
reason_str = t.reason[:40] if t.reason else ""
- lines.append(f"| {t.name} | {t.type} | {config_str} | {reason_str} | {status} | {t.fire_count} |")
+ lines.append(f"| {t.name} | {t.type} | {config_str} | {webhook_url} | {reason_str} | {status} | {t.fire_count} |")
return "\n".join(lines)
+
+
+
except Exception as e:
return f"❌ Failed to list triggers: {e}"
-# ─── Image Upload (ImageKit CDN) ────────────────────────────────
async def _upload_image(agent_id: uuid.UUID, ws: Path, arguments: dict) -> str:
"""Upload an image to ImageKit CDN and return the public URL.
@@ -7779,6 +7807,155 @@ async def _install_skill(agent_id: uuid.UUID, ws: Path, arguments: dict) -> str:
return f"❌ Install failed: {str(e)[:300]}"
+
+async def _sql_execute(arguments: dict) -> str:
+ """Execute SQL on any database via connection URI."""
+ import asyncio
+
+ connection_string = arguments.get("connection_string", "").strip()
+ sql = arguments.get("sql", "").strip()
+ timeout = min(int(arguments.get("timeout", 30)), 120)
+
+ if not connection_string:
+ return "❌ Missing required argument 'connection_string'"
+ if not sql:
+ return "❌ Missing required argument 'sql'"
+
+ # Determine database type from URI scheme
+ uri_lower = connection_string.lower()
+
+ try:
+ if uri_lower.startswith("sqlite"):
+ return await asyncio.wait_for(_sql_execute_sqlite(connection_string, sql), timeout=timeout)
+ elif uri_lower.startswith("mysql"):
+ return await asyncio.wait_for(_sql_execute_mysql(connection_string, sql), timeout=timeout)
+ elif uri_lower.startswith("postgresql") or uri_lower.startswith("postgres"):
+ return await asyncio.wait_for(_sql_execute_postgres(connection_string, sql), timeout=timeout)
+ else:
+ return "❌ Unsupported database type. Supported: mysql://, postgresql://, sqlite:///"
+ except asyncio.TimeoutError:
+ return f"❌ Query timed out after {timeout}s"
+ except Exception as e:
+ return f"❌ Database error: {type(e).__name__}: {str(e)[:500]}"
+
+
+async def _sql_execute_sqlite(connection_string: str, sql: str) -> str:
+ """Execute SQL on SQLite."""
+ import aiosqlite
+
+ # Parse path from sqlite:///path or sqlite:////absolute/path
+ db_path = connection_string.replace("sqlite:///", "", 1)
+ if not db_path:
+ return "❌ Invalid SQLite connection string. Use: sqlite:///path/to/db.sqlite"
+
+ async with aiosqlite.connect(db_path) as db:
+ db.row_factory = aiosqlite.Row
+ cursor = await db.execute(sql)
+
+ # Check if it's a SELECT-like query that returns rows
+ if cursor.description:
+ columns = [d[0] for d in cursor.description]
+ rows = await cursor.fetchmany(500)
+ await db.commit()
+ return _format_sql_result(columns, [tuple(r) for r in rows], cursor.rowcount)
+ else:
+ await db.commit()
+ return f"✅ Statement executed successfully. Rows affected: {cursor.rowcount}"
+
+
async def _sql_execute_mysql(connection_string: str, sql: str) -> str:
    """Execute SQL on a MySQL server addressed by a mysql:// URI.

    Row-returning statements yield a formatted table (capped at 500 rows);
    other statements are committed and reported with the affected row
    count. Requires the aiomysql package.
    """
    import aiomysql
    from urllib.parse import urlparse, parse_qs, unquote

    uri = urlparse(connection_string)
    # Credentials may be percent-encoded in the URI; database name comes
    # from the path component, charset from the query string.
    database = uri.path.lstrip("/") if uri.path else None
    charset = parse_qs(uri.query).get("charset", ["utf8mb4"])[0]

    conn = await aiomysql.connect(
        host=uri.hostname or "localhost",
        port=uri.port or 3306,
        user=unquote(uri.username or "root"),
        password=unquote(uri.password or ""),
        db=database,
        charset=charset,
    )

    try:
        async with conn.cursor() as cursor:
            await cursor.execute(sql)

            if not cursor.description:
                # DML/DDL: persist the change, report affected rows
                await conn.commit()
                return f"✅ Statement executed successfully. Rows affected: {cursor.rowcount}"

            headers = [d[0] for d in cursor.description]
            fetched = await cursor.fetchmany(500)  # cap result size
            return _format_sql_result(headers, fetched, cursor.rowcount)
    finally:
        conn.close()
+
+
async def _sql_execute_postgres(connection_string: str, sql: str) -> str:
    """Execute SQL on PostgreSQL via asyncpg.

    Row-returning statements yield a formatted table (capped at 500 rows);
    other statements report the server's command tag. Requires asyncpg.
    """
    import asyncpg

    # asyncpg accepts both postgres:// and postgresql://; normalize anyway
    dsn = connection_string
    if dsn.startswith("postgresql://"):
        dsn = "postgres://" + dsn[len("postgresql://"):]

    conn = await asyncpg.connect(dsn)

    try:
        # Prepare first so we can distinguish queries (which declare result
        # columns) from plain statements.
        stmt = await conn.prepare(sql)

        attributes = stmt.get_attributes()
        if attributes:
            columns = [attr.name for attr in attributes]
            # BUG FIX: PreparedStatement.fetch(*args) takes *query
            # parameters*, not a row limit — fetch(500) bound 500 as $1 and
            # failed for every parameterless query. Fetch all and cap
            # client-side instead.
            all_rows = await stmt.fetch()
            rows = all_rows[:500]
            return _format_sql_result(columns, [tuple(r.values()) for r in rows], len(all_rows))
        else:
            # No result columns — execute and surface the command tag
            result = await conn.execute(sql)
            return f"✅ Statement executed successfully. {result}"
    finally:
        await conn.close()
+
+
+def _format_sql_result(columns: list, rows: list, total_count: int) -> str:
+ """Format SQL query results as a readable table string."""
+ if not rows:
+ return f"Query returned 0 rows.\nColumns: {', '.join(columns)}"
+
+ # Convert all values to strings
+ str_rows = []
+ for row in rows:
+ str_rows.append([str(v) if v is not None else "NULL" for v in row])
+
+ # Calculate column widths (cap at 50 chars per column)
+ widths = [min(max(len(c), max((len(r[i]) for r in str_rows), default=0)), 50) for i, c in enumerate(columns)]
+
+ # Build table
+ header = " | ".join(c.ljust(w) for c, w in zip(columns, widths))
+ separator = "-+-".join("-" * w for w in widths)
+
+ lines = [header, separator]
+ for row in str_rows[:100]: # Display max 100 rows in formatted output
+ line = " | ".join(str(v)[:50].ljust(w) for v, w in zip(row, widths))
+ lines.append(line)
+
+ result = "\n".join(lines)
+
+ if len(rows) > 100:
+ result += f"\n... ({len(rows)} rows total, showing first 100)"
+ else:
+ result += f"\n({len(rows)} rows)"
+
+ if total_count > len(rows):
+ result += f"\n(Total matching: {total_count}, fetched: {len(rows)})"
+
+ return result
+
# ─── AgentBay: Browser Extract & Observe ────────────────────────────────
async def _agentbay_browser_extract(agent_id: Optional[uuid.UUID], ws: Path, arguments: dict) -> str:
diff --git a/backend/app/services/auth_provider.py b/backend/app/services/auth_provider.py
index 7a0dbeee..844d2a55 100644
--- a/backend/app/services/auth_provider.py
+++ b/backend/app/services/auth_provider.py
@@ -6,6 +6,7 @@
import httpx
from abc import ABC, abstractmethod
+from fastapi import HTTPException
from dataclasses import dataclass
from datetime import datetime
from typing import Any
@@ -138,13 +139,59 @@ async def find_or_create_user(
# Identity exists but no user in this tenant
user = None
+ # 5. 通过 provider_user_id 匹配现有用户 username(同租户下唯一登录凭证)
+ if not user and user_info.provider_user_id:
+ from sqlalchemy import and_
+ from app.models.user import Identity as IdentityModel
+ from sqlalchemy.orm import selectinload
+
+ # 构建查询条件:tenant_id 有值时匹配该租户或无租户的用户,为 None 时不限制租户
+ if tenant_id:
+ from sqlalchemy import or_
+ where_clause = and_(
+ IdentityModel.username == user_info.provider_user_id,
+ or_(User.tenant_id == tenant_id, User.tenant_id.is_(None)),
+ )
+ else:
+ where_clause = IdentityModel.username == user_info.provider_user_id
+
+ result = await db.execute(
+ select(User).join(User.identity).where(where_clause).options(selectinload(User.identity))
+ )
+ candidate = result.scalar_one_or_none()
+ if candidate:
+ user = candidate
+ await sso_service.link_identity(
+ db,
+ str(user.id),
+ self.provider_type,
+ user_info.provider_user_id,
+ user_info.raw_data,
+ tenant_id=tenant_id,
+ )
+ logger.info(f"[SSO] Matched existing user by username: {user.username} (tenant_id={tenant_id})")
+
+
+ # 匹配到用户后,检查是否需要自动绑定租户
+ if user and tenant_id and not user.tenant_id:
+ user.tenant_id = tenant_id
+ logger.info(f"[SSO] Auto-bound user {user.username} to tenant {tenant_id}")
+
if user:
# Update user info and ensure identity is loaded
if not user.identity_id:
from app.services.registration_service import registration_service
identity = await registration_service.find_or_create_identity(db, email=user_info.email, phone=user_info.mobile)
user.identity_id = identity.id
-
+
+ # Ensure identity is loaded for proxy field access
+ if not hasattr(user, "identity") or user.identity is None:
+ from sqlalchemy.orm import selectinload as _sil
+ refreshed = await db.execute(
+ select(User).where(User.id == user.id).options(_sil(User.identity))
+ )
+ user = refreshed.scalar_one()
+
await self._update_existing_user(db, user, user_info)
else:
# 3. Create new user (and Identity if needed)
@@ -201,10 +248,24 @@ async def _update_existing_user(
user.display_name = user_info.name
if user_info.avatar_url and not user.avatar_url:
user.avatar_url = user_info.avatar_url
- if user_info.email and not user.email:
- user.email = user_info.email
- if user_info.mobile and not user.primary_mobile:
- user.primary_mobile = user_info.mobile
+
+ # Update identity fields (email/mobile/username) through the identity relationship
+ identity = user.identity
+ if identity:
+ if user_info.email and not identity.email:
+ identity.email = user_info.email
+ if user_info.mobile and not identity.phone:
+ identity.phone = user_info.mobile
+ # OAuth2 登录时用 provider_user_id 更新 dingtalk_ 开头的临时用户名
+ if (
+ user_info.provider_user_id
+ and identity.username
+ and identity.username.startswith("dingtalk_")
+ and self.provider_type == "oauth2"
+ ):
+ old_username = identity.username
+ identity.username = user_info.provider_user_id
+ logger.info(f"[SSO] Updated username from {old_username} to {user_info.provider_user_id}")
# Update legacy fields if applicable
await self._update_legacy_user_fields(user, user_info)
@@ -252,14 +313,13 @@ async def _create_new_user(
is_active=True,
)
-
# Set legacy fields if needed
await self._set_legacy_user_fields(user, user_info)
db.add(user)
await db.flush()
-
- # Preload identity
+
+ # Preload identity for downstream access
user.identity = identity
return user
@@ -294,8 +354,19 @@ async def get_authorization_url(self, redirect_uri: str, state: str) -> str:
return f"{base_url}?{params}"
async def get_app_access_token(self) -> str:
- if self._app_access_token:
- return self._app_access_token
+ """Get or refresh the Feishu app access token.
+
+ Cached in Redis (preferred) with in-memory fallback.
+ Key: clawith:token:feishu_tenant:{app_id}
+ TTL: 6900s (7200s validity - 5 min early refresh)
+ """
+ from app.core.token_cache import get_cached_token, set_cached_token
+
+ cache_key = f"clawith:token:feishu_tenant:{self.app_id}"
+ cached = await get_cached_token(cache_key)
+ if cached:
+ self._app_access_token = cached
+ return cached
async with httpx.AsyncClient() as client:
resp = await client.post(
@@ -303,8 +374,13 @@ async def get_app_access_token(self) -> str:
json={"app_id": self.app_id, "app_secret": self.app_secret},
)
data = resp.json()
- self._app_access_token = data.get("app_access_token", "")
- return self._app_access_token
+ token = data.get("app_access_token", "") or data.get("tenant_access_token", "")
+ expire = data.get("expire", 7200)
+ if token:
+ ttl = max(expire - 300, 60)
+ await set_cached_token(cache_key, token, ttl)
+ self._app_access_token = token
+ return token
async def exchange_code_for_token(self, code: str) -> dict:
app_token = await self.get_app_access_token()
@@ -515,6 +591,187 @@ async def get_user_info(self, access_token: str) -> ExternalUserInfo:
)
+
class OAuth2AuthProvider(BaseAuthProvider):
    """Generic OAuth2 provider implementation (RFC 6749 Authorization Code flow).

    Supports standard OIDC-style userinfo responses as well as wrapped
    responses of the form ``{"status": 0, "data": {...}}``. Field names can
    be remapped per-provider via the ``field_mapping`` config entry, with
    standard OIDC claim names as fallbacks.
    """

    provider_type = "oauth2"

    def __init__(self, provider=None, config=None):
        super().__init__(provider, config)
        # Accept both client_id/client_secret and legacy app_id/app_secret keys
        self.client_id = self.config.get("client_id") or self.config.get("app_id", "")
        self.client_secret = self.config.get("client_secret") or self.config.get("app_secret", "")
        self.authorize_url = self.config.get("authorize_url", "")
        self.scope = self.config.get("scope", "")

        # Derive token_url / user_info_url from authorize_url when not
        # configured (e.g. https://idp/oauth/authorize -> .../token)
        base = self.authorize_url.rsplit("/", 1)[0] if self.authorize_url else ""
        self.token_url = self.config.get("token_url") or f"{base}/token"
        self.user_info_url = self.config.get("user_info_url") or f"{base}/userinfo"

        # Admin-defined field-name mapping (takes precedence over defaults)
        self.field_mapping = self.config.get("field_mapping", {})

        # Standard OIDC claim fallback order, tried when no custom mapping hits
        self.FIELD_DEFAULTS = {
            "user_id": ["sub", "userId", "id"],
            "name": ["name", "userName", "preferred_username", "nickname"],
            "email": ["email"],
            "mobile": ["phone_number", "mobile", "phone"],
            "avatar": ["picture", "avatar_url", "avatar"],
        }

    async def get_authorization_url(self, redirect_uri: str, state: str) -> str:
        """Build the provider's authorization redirect URL."""
        from urllib.parse import quote
        # state is generated server-side and assumed URL-safe; the other
        # values are percent-encoded
        params = (
            f"response_type=code"
            f"&client_id={quote(self.client_id)}"
            f"&redirect_uri={quote(redirect_uri)}"
            f"&scope={quote(self.scope)}"
            f"&state={state}"
        )
        return f"{self.authorize_url}?{params}"

    async def exchange_code_for_token(self, code: str) -> dict:
        """Exchange an authorization code for tokens (client_secret_basic auth).

        Returns:
            The token endpoint's JSON payload, or {} on a non-200 response.
        """
        import base64
        credentials = base64.b64encode(f"{self.client_id}:{self.client_secret}".encode()).decode()

        async with httpx.AsyncClient() as client:
            resp = await client.post(
                self.token_url,
                headers={
                    "Authorization": f"Basic {credentials}",
                },
                # NOTE(review): RFC 6749 §4.1.3 requires redirect_uri here when
                # it was included in the authorization request; some IdPs
                # reject its absence — confirm against the providers in use.
                data={
                    "grant_type": "authorization_code",
                    "code": code,
                },
            )
            if resp.status_code != 200:
                logger.error(f"OAuth2 token exchange failed (HTTP {resp.status_code}): {resp.text}")
                return {}
            return resp.json()

    def _get_field(self, data: dict, field_key: str) -> str:
        """Get a field value using user-defined mapping first, then standard OIDC fallbacks."""
        # 1. Admin-configured mapping takes priority
        custom_key = self.field_mapping.get(field_key)
        if custom_key and data.get(custom_key):
            return str(data[custom_key])
        # 2. Then try each standard fallback key in order
        for std_key in self.FIELD_DEFAULTS.get(field_key, []):
            if data.get(std_key):
                return str(data[std_key])
        return ""

    async def get_user_info(self, access_token: str) -> ExternalUserInfo:
        """Fetch user info from the userinfo endpoint using a Bearer token."""
        async with httpx.AsyncClient() as client:
            resp = await client.get(
                self.user_info_url,
                headers={"Authorization": f"Bearer {access_token}"},
            )
            resp_data = resp.json()

        # Wrapped format: {"status": 0, "data": {...}}
        # Standard OIDC format: flat object
        if "data" in resp_data and isinstance(resp_data["data"], dict):
            info = resp_data["data"]
        else:
            info = resp_data

        logger.info(f"OAuth2 user info: {info}")

        # Generic field resolution (custom mapping first, then OIDC fallbacks)
        user_id = self._get_field(info, "user_id")
        name = self._get_field(info, "name")
        email = self._get_field(info, "email")
        mobile = self._get_field(info, "mobile")

        return ExternalUserInfo(
            provider_type=self.provider_type,
            provider_user_id=str(user_id),
            name=name,
            email=email,
            mobile=mobile,
            raw_data=info,
        )

    async def get_user_info_from_token_data(self, token_data: dict) -> ExternalUserInfo:
        """Extract user info from token exchange response (fallback when userinfo endpoint fails)."""
        info = token_data.copy()
        # Some IdPs embed the profile under "userInfo" in the token response
        if "userInfo" in info and isinstance(info["userInfo"], dict):
            info = {**info, **info["userInfo"]}
        logger.info(f"OAuth2 user info from token_data: {info}")
        user_id = self._get_field(info, "user_id") or info.get("openid", "")
        name = self._get_field(info, "name")
        email = self._get_field(info, "email")
        mobile = self._get_field(info, "mobile")
        return ExternalUserInfo(
            provider_type=self.provider_type,
            provider_user_id=str(user_id),
            name=name,
            email=email,
            mobile=mobile,
            raw_data=info,
        )

    async def _create_new_user(self, db, user_info, tenant_id):
        """Override to use provider_user_id as username for OAuth2.

        Raises:
            HTTPException(409) when the derived username already exists —
            find_or_create_user's match chain should have claimed it first.
        """
        from app.services.registration_service import registration_service
        from app.models.user import Identity as IdentityModel

        # Prefer provider_user_id (e.g. userId="zhangsan"), then the email
        # local part, then a generic fallback
        username = (
            user_info.provider_user_id
            or (user_info.email.split("@")[0] if user_info.email else None)
            or "oauth2_user"
        )

        # Check username uniqueness via Identity.
        # BUG FIX: this previously referenced the bare name `Identity`,
        # which is not what the local import above binds (IdentityModel).
        # NOTE(review): the check is global, not tenant-scoped, while the
        # error message mentions the tenant — confirm Identity.username is
        # globally unique.
        existing = await db.execute(
            select(User).join(User.identity).where(
                IdentityModel.username == username,
            )
        )
        if existing.scalar_one_or_none():
            logger.error(
                f"[OAuth2] Username conflict detected: {username} already exists in tenant {tenant_id}. "
                f"This should not happen if find_or_create_user match chain is working correctly."
            )
            raise HTTPException(
                status_code=409,
                detail=f"Username '{username}' already exists. Please contact administrator."
            )

        email = user_info.email or f"{username}@oauth2.local"

        # Create or find Identity
        identity = await registration_service.find_or_create_identity(
            db,
            email=email,
            phone=user_info.mobile,
            username=username,
            password=user_info.provider_user_id or username,
        )

        # Create tenant-scoped User
        user = User(
            identity_id=identity.id,
            display_name=user_info.name or username,
            avatar_url=user_info.avatar_url,
            registration_source=self.provider_type,
            tenant_id=tenant_id,
            is_active=True,
        )

        db.add(user)
        await db.flush()

        # Preload identity for downstream access
        user.identity = identity
        return user
+
+
class MicrosoftTeamsAuthProvider(BaseAuthProvider):
"""Microsoft Teams OAuth provider implementation."""
@@ -536,5 +793,6 @@ async def get_user_info(self, access_token: str) -> ExternalUserInfo:
"feishu": FeishuAuthProvider,
"dingtalk": DingTalkAuthProvider,
"wecom": WeComAuthProvider,
+ "oauth2": OAuth2AuthProvider,
"microsoft_teams": MicrosoftTeamsAuthProvider,
}
diff --git a/backend/app/services/channel_commands.py b/backend/app/services/channel_commands.py
new file mode 100644
index 00000000..cd7bd9cc
--- /dev/null
+++ b/backend/app/services/channel_commands.py
@@ -0,0 +1,75 @@
+"""Channel command handler for external channels (DingTalk, Feishu, etc.)
+
+Supports slash commands like /new to reset session context.
+"""
+
+import uuid
+from datetime import datetime, timezone
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.models.chat_session import ChatSession
+from app.services.channel_session import find_or_create_channel_session
+
+
# Slash commands recognized on external channels
COMMANDS = {"/new", "/reset"}


def is_channel_command(text: str) -> bool:
    """Return True when *text*, ignoring surrounding whitespace and case, is a known slash command."""
    normalized = text.strip().lower()
    return normalized in COMMANDS
+
+
async def handle_channel_command(
    db: AsyncSession,
    command: str,
    agent_id: uuid.UUID,
    user_id: uuid.UUID,
    external_conv_id: str,
    source_channel: str,
) -> dict:
    """Handle a recognized channel command.

    /new and /reset archive the current session (by renaming its
    external_conv_id with a timestamp suffix) and start a fresh session
    bound to the same external conversation. Changes are flushed but not
    committed — the caller owns the transaction.

    Returns:
        {"action": "new_session", "session_id": ..., "message": ...} on
        success, or {"action": "unknown", "message": ...} otherwise.
    """
    cmd = command.strip().lower()

    if cmd not in ("/new", "/reset"):
        return {"action": "unknown", "message": f"未知命令: {cmd}"}

    # Locate the session currently bound to this external conversation
    lookup = await db.execute(
        select(ChatSession).where(
            ChatSession.agent_id == agent_id,
            ChatSession.external_conv_id == external_conv_id,
        )
    )
    current = lookup.scalar_one_or_none()

    if current is not None:
        # Free the external_conv_id by renaming the old session, so future
        # find-or-create lookups will build a brand-new session.
        stamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
        current.external_conv_id = f"{external_conv_id}__archived_{stamp}"
        await db.flush()

    fresh = ChatSession(
        agent_id=agent_id,
        user_id=user_id,
        title="New Session",
        source_channel=source_channel,
        external_conv_id=external_conv_id,
        created_at=datetime.now(timezone.utc),
    )
    db.add(fresh)
    await db.flush()

    return {
        "action": "new_session",
        "session_id": str(fresh.id),
        "message": "已开启新对话,之前的上下文已清除。",
    }
diff --git a/backend/app/services/dingtalk_reaction.py b/backend/app/services/dingtalk_reaction.py
new file mode 100644
index 00000000..4899de78
--- /dev/null
+++ b/backend/app/services/dingtalk_reaction.py
@@ -0,0 +1,110 @@
+"""DingTalk emotion reaction service — "thinking" indicator on user messages."""
+
+import asyncio
+from loguru import logger
+from app.services.dingtalk_token import dingtalk_token_manager
+
+
+async def add_thinking_reaction(
+ app_key: str,
+ app_secret: str,
+ message_id: str,
+ conversation_id: str,
+) -> bool:
+ """Add "🤔思考中" reaction to a user message. Fire-and-forget, never raises."""
+ import httpx
+
+ if not message_id or not conversation_id or not app_key:
+ return False
+
+ try:
+ token = await dingtalk_token_manager.get_token(app_key, app_secret)
+ if not token:
+ logger.warning("[DingTalk Reaction] Failed to get access token")
+ return False
+
+ async with httpx.AsyncClient(timeout=5) as client:
+ resp = await client.post(
+ "https://api.dingtalk.com/v1.0/robot/emotion/reply",
+ headers={
+ "x-acs-dingtalk-access-token": token,
+ "Content-Type": "application/json",
+ },
+ json={
+ "robotCode": app_key,
+ "openMsgId": message_id,
+ "openConversationId": conversation_id,
+ "emotionType": 2,
+ "emotionName": "🤔思考中",
+ "textEmotion": {
+ "emotionId": "2659900",
+ "emotionName": "🤔思考中",
+ "text": "🤔思考中",
+ "backgroundId": "im_bg_1",
+ },
+ },
+ )
+ if resp.status_code == 200:
+ logger.info(f"[DingTalk Reaction] Thinking reaction added for msg {message_id[:16]}")
+ return True
+ else:
+ logger.warning(f"[DingTalk Reaction] Add failed: {resp.status_code} {resp.text[:200]}")
+ return False
+ except Exception as e:
+ logger.warning(f"[DingTalk Reaction] Add thinking reaction error: {e}")
+ return False
+
+
+async def recall_thinking_reaction(
+    app_key: str,
+    app_secret: str,
+    message_id: str,
+    conversation_id: str,
+) -> None:
+    """Recall "🤔思考中" reaction with retry (0ms, 1500ms, 5000ms). Fire-and-forget.
+
+    Best-effort: every failure path is logged and swallowed so callers can
+    schedule this without awaiting or handling errors.
+
+    Args:
+        app_key: DingTalk app key (robotCode).
+        app_secret: DingTalk app secret.
+        message_id: openMsgId of the message the reaction was attached to.
+        conversation_id: openConversationId of the chat.
+    """
+    import httpx
+
+    # Missing identifiers mean there is nothing to recall.
+    if not message_id or not conversation_id or not app_key:
+        return
+
+    # First attempt is immediate; subsequent attempts wait 1.5s then 5s.
+    delays = [0, 1.5, 5.0]
+
+    for delay in delays:
+        if delay > 0:
+            await asyncio.sleep(delay)
+
+        try:
+            token = await dingtalk_token_manager.get_token(app_key, app_secret)
+            if not token:
+                # Token failure consumes this attempt; the next iteration retries.
+                continue
+
+            async with httpx.AsyncClient(timeout=5) as client:
+                resp = await client.post(
+                    "https://api.dingtalk.com/v1.0/robot/emotion/recall",
+                    headers={
+                        "x-acs-dingtalk-access-token": token,
+                        "Content-Type": "application/json",
+                    },
+                    # Payload mirrors add_thinking_reaction so the same
+                    # emotion entry is matched and removed.
+                    json={
+                        "robotCode": app_key,
+                        "openMsgId": message_id,
+                        "openConversationId": conversation_id,
+                        "emotionType": 2,
+                        "emotionName": "🤔思考中",
+                        "textEmotion": {
+                            "emotionId": "2659900",
+                            "emotionName": "🤔思考中",
+                            "text": "🤔思考中",
+                            "backgroundId": "im_bg_1",
+                        },
+                    },
+                )
+            if resp.status_code == 200:
+                logger.info(f"[DingTalk Reaction] Thinking reaction recalled for msg {message_id[:16]}")
+                return
+            else:
+                logger.warning(f"[DingTalk Reaction] Recall attempt failed: {resp.status_code}")
+        except Exception as e:
+            logger.warning(f"[DingTalk Reaction] Recall error: {e}")
+
+    # Loop exhausted without a 200 response.
+    logger.warning(f"[DingTalk Reaction] All recall attempts failed for msg {message_id[:16]}")
diff --git a/backend/app/services/dingtalk_service.py b/backend/app/services/dingtalk_service.py
index 010fc56a..1072e157 100644
--- a/backend/app/services/dingtalk_service.py
+++ b/backend/app/services/dingtalk_service.py
@@ -6,29 +6,12 @@
async def get_dingtalk_access_token(app_id: str, app_secret: str) -> dict:
- """Get DingTalk access_token using app_id and app_secret.
-
- API: https://open.dingtalk.com/document/orgapp/obtain-access_token
- """
- url = "https://oapi.dingtalk.com/gettoken"
- params = {
- "appkey": app_id,
- "appsecret": app_secret,
- }
-
- async with httpx.AsyncClient(timeout=10) as client:
- try:
- resp = await client.get(url, params=params)
- data = resp.json()
-
- if data.get("errcode") == 0:
- return {"access_token": data.get("access_token"), "expires_in": data.get("expires_in")}
- else:
- logger.error(f"[DingTalk] Failed to get access_token: {data}")
- return {"errcode": data.get("errcode"), "errmsg": data.get("errmsg")}
- except Exception as e:
- logger.error(f"[DingTalk] Network error getting access_token: {e}")
- return {"errcode": -1, "errmsg": str(e)}
+ """Get DingTalk access_token using app_id and app_secret."""
+ from app.services.dingtalk_token import dingtalk_token_manager
+ token = await dingtalk_token_manager.get_token(app_id, app_secret)
+ if token:
+ return {"access_token": token, "expires_in": 7200}
+ return {"access_token": None, "expires_in": 0}
async def send_dingtalk_v1_robot_oto_message(
diff --git a/backend/app/services/dingtalk_stream.py b/backend/app/services/dingtalk_stream.py
index 28a8ba8e..8b0bd505 100644
--- a/backend/app/services/dingtalk_stream.py
+++ b/backend/app/services/dingtalk_stream.py
@@ -5,15 +5,418 @@
"""
import asyncio
+import base64
+import json
import threading
import uuid
-from typing import Dict
+from pathlib import Path
+from typing import Dict, List, Optional, Tuple
+import httpx
from loguru import logger
from sqlalchemy import select
+from app.config import get_settings
from app.database import async_session
from app.models.channel_config import ChannelConfig
+from app.services.dingtalk_token import dingtalk_token_manager
+
+
+# ─── DingTalk Media Helpers ─────────────────────────────
+
+
+async def _get_media_download_url(
+ access_token: str, download_code: str, robot_code: str
+) -> Optional[str]:
+ """Get media file download URL from DingTalk API."""
+ try:
+ async with httpx.AsyncClient(timeout=10) as client:
+ resp = await client.post(
+ "https://api.dingtalk.com/v1.0/robot/messageFiles/download",
+ headers={"x-acs-dingtalk-access-token": access_token},
+ json={"downloadCode": download_code, "robotCode": robot_code},
+ )
+ data = resp.json()
+ url = data.get("downloadUrl")
+ if url:
+ return url
+ logger.error(f"[DingTalk] Failed to get download URL: {data}")
+ return None
+ except Exception as e:
+ logger.error(f"[DingTalk] Error getting download URL: {e}")
+ return None
+
+
+async def _download_file(url: str) -> Optional[bytes]:
+ """Download a file from a URL and return its bytes."""
+ try:
+ async with httpx.AsyncClient(timeout=60, follow_redirects=True) as client:
+ resp = await client.get(url)
+ resp.raise_for_status()
+ return resp.content
+ except Exception as e:
+ logger.error(f"[DingTalk] Error downloading file: {e}")
+ return None
+
+
+async def _download_dingtalk_media(
+ app_key: str, app_secret: str, download_code: str
+) -> Optional[bytes]:
+ """Download a media file from DingTalk using downloadCode.
+
+ Steps: get access_token -> get download URL -> download file bytes.
+ """
+ access_token = await dingtalk_token_manager.get_token(app_key, app_secret)
+ if not access_token:
+ return None
+
+ download_url = await _get_media_download_url(access_token, download_code, app_key)
+ if not download_url:
+ return None
+
+ return await _download_file(download_url)
+
+
+def _resolve_upload_dir(agent_id: uuid.UUID) -> Path:
+ """Get the uploads directory for an agent, creating it if needed."""
+ settings = get_settings()
+ upload_dir = Path(settings.AGENT_DATA_DIR) / str(agent_id) / "workspace" / "uploads"
+ upload_dir.mkdir(parents=True, exist_ok=True)
+ return upload_dir
+
+
+async def _process_media_message(
+    msg_data: dict,
+    app_key: str,
+    app_secret: str,
+    agent_id: uuid.UUID,
+) -> Tuple[str, Optional[List[str]], Optional[List[str]]]:
+    """Process a DingTalk message and extract text + media info.
+
+    Dispatches on ``msgtype`` (text / picture / richText / audio / video /
+    file). Media is fetched via ``_download_dingtalk_media`` and persisted
+    under the agent's uploads directory; images are additionally base64
+    encoded for LLM vision. Never raises for unsupported types — a
+    human-readable placeholder string is returned instead.
+
+    Returns:
+        (user_text, image_base64_list, saved_file_paths)
+        - user_text: text content for the LLM (may include markers)
+        - image_base64_list: list of base64-encoded image data URIs, or None
+        - saved_file_paths: list of saved file paths, or None
+    """
+    msgtype = msg_data.get("msgtype", "text")
+    logger.info(f"[DingTalk] Processing message type: {msgtype}")
+
+    image_base64_list: List[str] = []
+    saved_file_paths: List[str] = []
+
+    if msgtype == "text":
+        # Plain text — handled by existing logic, return empty
+        text_content = msg_data.get("text", {}).get("content", "").strip()
+        return text_content, None, None
+
+    elif msgtype == "picture":
+        # Image message
+        download_code = msg_data.get("content", {}).get("downloadCode", "")
+        if not download_code:
+            # Try alternate location
+            download_code = msg_data.get("downloadCode", "")
+        if not download_code:
+            logger.warning("[DingTalk] Picture message without downloadCode")
+            return "[用户发送了图片,但无法下载]", None, None
+
+        file_bytes = await _download_dingtalk_media(app_key, app_secret, download_code)
+        if not file_bytes:
+            return "[用户发送了图片,但下载失败]", None, None
+
+        # Save to disk
+        upload_dir = _resolve_upload_dir(agent_id)
+        # NOTE(review): assumes pictures are JPEG — confirm DingTalk never
+        # delivers PNG here, otherwise the extension/MIME below are wrong.
+        filename = f"dingtalk_img_{uuid.uuid4().hex[:8]}.jpg"
+        save_path = upload_dir / filename
+        save_path.write_bytes(file_bytes)
+        logger.info(f"[DingTalk] Saved image to {save_path} ({len(file_bytes)} bytes)")
+
+        # Base64 encode for LLM vision
+        b64_data = base64.b64encode(file_bytes).decode("ascii")
+        image_marker = f"[image_data:data:image/jpeg;base64,{b64_data}]"
+        return f"[用户发送了图片]\n{image_marker}", [f"data:image/jpeg;base64,{b64_data}"], [str(save_path)]
+
+    elif msgtype == "richText":
+        # Rich text: may contain text segments + images
+        rich_text = msg_data.get("content", {}).get("richText", [])
+        text_parts: List[str] = []
+
+        for section in rich_text:
+            # Sections may be lists of items or single items — normalize both.
+            for item in section if isinstance(section, list) else [section]:
+                # NOTE(review): assumes each item is a dict; a non-dict item
+                # would raise on the membership test — confirm payload shape.
+                if "text" in item:
+                    text_parts.append(item["text"])
+                elif "downloadCode" in item:
+                    # Inline image in rich text
+                    file_bytes = await _download_dingtalk_media(
+                        app_key, app_secret, item["downloadCode"]
+                    )
+                    if file_bytes:
+                        upload_dir = _resolve_upload_dir(agent_id)
+                        filename = f"dingtalk_richimg_{uuid.uuid4().hex[:8]}.jpg"
+                        save_path = upload_dir / filename
+                        save_path.write_bytes(file_bytes)
+                        logger.info(f"[DingTalk] Saved rich text image to {save_path}")
+
+                        b64_data = base64.b64encode(file_bytes).decode("ascii")
+                        image_marker = f"[image_data:data:image/jpeg;base64,{b64_data}]"
+                        text_parts.append(image_marker)
+                        image_base64_list.append(f"data:image/jpeg;base64,{b64_data}")
+                        saved_file_paths.append(str(save_path))
+
+        combined_text = "\n".join(text_parts).strip()
+        if not combined_text:
+            combined_text = "[用户发送了富文本消息]"
+
+        return (
+            combined_text,
+            image_base64_list if image_base64_list else None,
+            saved_file_paths if saved_file_paths else None,
+        )
+
+    elif msgtype == "audio":
+        # Audio message — prefer recognition text if available
+        content = msg_data.get("content", {})
+        recognition = content.get("recognition", "")
+        if recognition:
+            logger.info(f"[DingTalk] Audio with recognition: {recognition[:80]}")
+            return f"[语音消息] {recognition}", None, None
+
+        # No recognition — try to download the audio file
+        download_code = content.get("downloadCode", "")
+        if download_code:
+            file_bytes = await _download_dingtalk_media(app_key, app_secret, download_code)
+            if file_bytes:
+                upload_dir = _resolve_upload_dir(agent_id)
+                duration = content.get("duration", "unknown")
+                filename = f"dingtalk_audio_{uuid.uuid4().hex[:8]}.amr"
+                save_path = upload_dir / filename
+                save_path.write_bytes(file_bytes)
+                logger.info(f"[DingTalk] Saved audio to {save_path} ({len(file_bytes)} bytes)")
+                # NOTE(review): the literal "(unknown)" below looks like a
+                # redacted placeholder for the saved path — confirm intent.
+                return (
+                    f"[用户发送了语音消息,时长{duration}ms,已保存到 (unknown)]",
+                    None,
+                    [str(save_path)],
+                )
+        return "[用户发送了语音消息,但无法处理]", None, None
+
+    elif msgtype == "video":
+        # Video message
+        content = msg_data.get("content", {})
+        download_code = content.get("downloadCode", "")
+        if download_code:
+            file_bytes = await _download_dingtalk_media(app_key, app_secret, download_code)
+            if file_bytes:
+                upload_dir = _resolve_upload_dir(agent_id)
+                duration = content.get("duration", "unknown")
+                filename = f"dingtalk_video_{uuid.uuid4().hex[:8]}.mp4"
+                save_path = upload_dir / filename
+                save_path.write_bytes(file_bytes)
+                logger.info(f"[DingTalk] Saved video to {save_path} ({len(file_bytes)} bytes)")
+                # NOTE(review): "(unknown)" here too — see audio branch above.
+                return (
+                    f"[用户发送了视频,时长{duration}ms,已保存到 (unknown)]",
+                    None,
+                    [str(save_path)],
+                )
+        return "[用户发送了视频,但无法下载]", None, None
+
+    elif msgtype == "file":
+        # File message
+        content = msg_data.get("content", {})
+        download_code = content.get("downloadCode", "")
+        original_filename = content.get("fileName", "unknown_file")
+        if download_code:
+            file_bytes = await _download_dingtalk_media(app_key, app_secret, download_code)
+            if file_bytes:
+                upload_dir = _resolve_upload_dir(agent_id)
+                # Preserve original filename, add prefix to avoid collision
+                safe_name = f"dingtalk_{uuid.uuid4().hex[:8]}_{original_filename}"
+                save_path = upload_dir / safe_name
+                save_path.write_bytes(file_bytes)
+                logger.info(
+                    f"[DingTalk] Saved file '{original_filename}' to {save_path} "
+                    f"({len(file_bytes)} bytes)"
+                )
+                return (
+                    f"[file:{original_filename}]",
+                    None,
+                    [str(save_path)],
+                )
+        return f"[用户发送了文件 {original_filename},但无法下载]", None, None
+
+    else:
+        logger.warning(f"[DingTalk] Unsupported message type: {msgtype}")
+        return f"[用户发送了 {msgtype} 类型消息,暂不支持]", None, None
+
+
+# ─── DingTalk Media Upload & Send ───────────────────────
+
+async def _upload_dingtalk_media(
+ app_key: str,
+ app_secret: str,
+ file_path: str,
+ media_type: str = "file",
+) -> Optional[str]:
+ """Upload a media file to DingTalk and return the mediaId.
+
+ Args:
+ app_key: DingTalk app key (robotCode).
+ app_secret: DingTalk app secret.
+ file_path: Local file path to upload.
+ media_type: One of 'image', 'voice', 'video', 'file'.
+
+ Returns:
+ mediaId string on success, None on failure.
+ """
+ access_token = await dingtalk_token_manager.get_token(app_key, app_secret)
+ if not access_token:
+ return None
+
+ file_p = Path(file_path)
+ if not file_p.exists():
+ logger.error(f"[DingTalk] Upload failed: file not found: {file_path}")
+ return None
+
+ try:
+ file_bytes = file_p.read_bytes()
+ async with httpx.AsyncClient(timeout=60) as client:
+ # Use the legacy oapi endpoint which is more reliable and widely supported.
+ # The newer api.dingtalk.com/v1.0/robot/messageFiles/upload requires
+ # additional permissions and returns InvalidAction.NotFound for some apps.
+ upload_url = (
+ f"https://oapi.dingtalk.com/media/upload"
+ f"?access_token={access_token}&type={media_type}"
+ )
+ resp = await client.post(
+ upload_url,
+ files={"media": (file_p.name, file_bytes)},
+ )
+ data = resp.json()
+ # Legacy API returns media_id (snake_case), new API returns mediaId
+ media_id = data.get("media_id") or data.get("mediaId")
+ if media_id and data.get("errcode", 0) == 0:
+ logger.info(
+ f"[DingTalk] Uploaded {media_type} '{file_p.name}' -> mediaId={media_id[:20]}..."
+ )
+ return media_id
+ logger.error(f"[DingTalk] Upload failed: {data}")
+ return None
+ except Exception as e:
+ logger.error(f"[DingTalk] Upload error: {e}")
+ return None
+
+
+async def _send_dingtalk_media_message(
+ app_key: str,
+ app_secret: str,
+ target_id: str,
+ media_id: str,
+ media_type: str,
+ conversation_type: str,
+ filename: Optional[str] = None,
+) -> bool:
+ """Send a media message via DingTalk proactive message API.
+
+ Args:
+ app_key: DingTalk app key (robotCode).
+ app_secret: DingTalk app secret.
+ target_id: For P2P: sender_staff_id; For group: openConversationId.
+ media_id: The mediaId from upload.
+ media_type: One of 'image', 'voice', 'video', 'file'.
+ conversation_type: '1' for P2P, '2' for group.
+ filename: Original filename (used for file/video types).
+
+ Returns:
+ True on success, False on failure.
+ """
+ access_token = await dingtalk_token_manager.get_token(app_key, app_secret)
+ if not access_token:
+ return False
+
+ headers = {"x-acs-dingtalk-access-token": access_token}
+
+ # Build msgKey and msgParam based on media_type
+ if media_type == "image":
+ msg_key = "sampleImageMsg"
+ msg_param = json.dumps({"photoURL": media_id})
+ elif media_type == "voice":
+ msg_key = "sampleAudio"
+ msg_param = json.dumps({"mediaId": media_id, "duration": "3000"})
+ elif media_type == "video":
+ # sampleVideo requires picMediaId (thumbnail) which we don't have;
+ # use sampleFile instead for broader compatibility (same as OpenClaw plugin).
+ safe_name = filename or "video.mp4"
+ ext = Path(safe_name).suffix.lstrip(".") or "mp4"
+ msg_key = "sampleFile"
+ msg_param = json.dumps({
+ "mediaId": media_id,
+ "fileName": safe_name,
+ "fileType": ext,
+ })
+ else:
+ # file
+ safe_name = filename or "file"
+ ext = Path(safe_name).suffix.lstrip(".") or "bin"
+ msg_key = "sampleFile"
+ msg_param = json.dumps({
+ "mediaId": media_id,
+ "fileName": safe_name,
+ "fileType": ext,
+ })
+
+ try:
+ async with httpx.AsyncClient(timeout=15) as client:
+ if conversation_type == "2":
+ # Group chat
+ resp = await client.post(
+ "https://api.dingtalk.com/v1.0/robot/groupMessages/send",
+ headers=headers,
+ json={
+ "robotCode": app_key,
+ "openConversationId": target_id,
+ "msgKey": msg_key,
+ "msgParam": msg_param,
+ },
+ )
+ else:
+ # P2P chat
+ resp = await client.post(
+ "https://api.dingtalk.com/v1.0/robot/oToMessages/batchSend",
+ headers=headers,
+ json={
+ "robotCode": app_key,
+ "userIds": [target_id],
+ "msgKey": msg_key,
+ "msgParam": msg_param,
+ },
+ )
+
+ data = resp.json()
+ # Check for error
+ if resp.status_code >= 400 or data.get("errcode"):
+ logger.error(f"[DingTalk] Send media failed: {data}")
+ return False
+
+ logger.info(
+ f"[DingTalk] Sent {media_type} message to {target_id[:16]}... "
+ f"(conv_type={conversation_type})"
+ )
+ return True
+ except Exception as e:
+ logger.error(f"[DingTalk] Send media error: {e}")
+ return False
+
+
+# ─── Stream Manager ─────────────────────────────────────
+
+def _fire_and_forget(loop, coro):
+ """Schedule a coroutine on the main loop and log any unhandled exception."""
+ future = asyncio.run_coroutine_threadsafe(coro, loop)
+ def _on_done(f):
+ try:
+ f.result()
+ except Exception:
+ logger.exception("[DingTalk Stream] Unhandled error in fire-and-forget coroutine")
+ future.add_done_callback(_on_done)
class DingTalkStreamManager:
@@ -67,47 +470,63 @@ def _run_client_thread(
app_secret: str,
stop_event: threading.Event,
):
- """Run the DingTalk Stream client in a blocking thread."""
- try:
- import dingtalk_stream
-
- # Reference to manager's main loop for async dispatch
- main_loop = self._main_loop
-
- class ClawithChatbotHandler(dingtalk_stream.ChatbotHandler):
- """Custom handler that dispatches messages to the Clawith LLM pipeline."""
-
- async def process(self, callback: dingtalk_stream.CallbackMessage):
- """Handle incoming bot message from DingTalk Stream.
-
- NOTE: The SDK invokes this method in the thread's own asyncio loop,
- so we must dispatch to the main FastAPI loop for DB + LLM work.
- """
- try:
- # Parse the raw data into a ChatbotMessage via class method
- incoming = dingtalk_stream.ChatbotMessage.from_dict(callback.data)
-
- # Extract text content
+ """Run the DingTalk Stream client with auto-reconnect."""
+ import dingtalk_stream # ImportError here exits immediately (no retry)
+
+ MAX_RETRIES = 5
+ RETRY_DELAYS = [2, 5, 15, 30, 60] # exponential backoff, seconds
+
+ main_loop = self._main_loop
+ retries = 0
+
+ class ClawithChatbotHandler(dingtalk_stream.ChatbotHandler):
+ """Custom handler that dispatches messages to the Clawith LLM pipeline."""
+
+ async def process(self, callback: dingtalk_stream.CallbackMessage):
+ """Handle incoming bot message from DingTalk Stream.
+
+ NOTE: The SDK invokes this method in the thread's own asyncio loop,
+ so we must dispatch to the main FastAPI loop for DB + LLM work.
+ """
+ try:
+ # Parse the raw data
+ incoming = dingtalk_stream.ChatbotMessage.from_dict(callback.data)
+ msg_data = callback.data if isinstance(callback.data, dict) else json.loads(callback.data)
+
+ msgtype = msg_data.get("msgtype", "text")
+ sender_staff_id = incoming.sender_staff_id or ""
+ sender_id = incoming.sender_id or ""
+ if not sender_staff_id and sender_id:
+ sender_staff_id = sender_id # fallback
+ sender_nick = incoming.sender_nick or ""
+ message_id = incoming.message_id or ""
+ conversation_id = incoming.conversation_id or ""
+ conversation_type = incoming.conversation_type or "1"
+ session_webhook = incoming.session_webhook or ""
+
+ logger.info(
+ f"[DingTalk Stream] Received {msgtype} message from {sender_staff_id}"
+ )
+
+ if msgtype == "text":
+ # Plain text — use existing logic
text_list = incoming.get_text_list()
user_text = " ".join(text_list).strip() if text_list else ""
-
if not user_text:
return dingtalk_stream.AckMessage.STATUS_OK, "empty message"
- sender_staff_id = incoming.sender_staff_id or incoming.sender_id or ""
- conversation_id = incoming.conversation_id or ""
- conversation_type = incoming.conversation_type or "1"
- session_webhook = incoming.session_webhook or ""
-
logger.info(
- f"[DingTalk Stream] Message from [{incoming.sender_nick}]{sender_staff_id}: {user_text[:80]}"
+ f"[DingTalk Stream] Text from {sender_staff_id}: {user_text[:80]}"
)
- # Dispatch to the main FastAPI event loop for DB + LLM processing
from app.api.dingtalk import process_dingtalk_message
if main_loop and main_loop.is_running():
- future = asyncio.run_coroutine_threadsafe(
+ # Add thinking reaction immediately
+ from app.services.dingtalk_reaction import add_thinking_reaction
+ _fire_and_forget(main_loop,
+ add_thinking_reaction(app_key, app_secret, message_id, conversation_id))
+ _fire_and_forget(main_loop,
process_dingtalk_message(
agent_id=agent_id,
sender_staff_id=sender_staff_id,
@@ -115,50 +534,178 @@ async def process(self, callback: dingtalk_stream.CallbackMessage):
conversation_id=conversation_id,
conversation_type=conversation_type,
session_webhook=session_webhook,
- ),
- main_loop,
- )
- # Wait for result (with timeout)
- try:
- future.result(timeout=120)
- except Exception as e:
- logger.error(f"[DingTalk Stream] LLM processing error: {e}")
- import traceback
- traceback.print_exc()
+ sender_nick=sender_nick,
+ message_id=message_id,
+ sender_id=sender_id,
+ ))
+ # Fire-and-forget: ACK immediately, do not wait for LLM
else:
- logger.warning("[DingTalk Stream] Main loop not available for dispatch")
-
- return dingtalk_stream.AckMessage.STATUS_OK, "ok"
- except Exception as e:
- logger.error(f"[DingTalk Stream] Error in message handler: {e}")
- import traceback
- traceback.print_exc()
- return dingtalk_stream.AckMessage.STATUS_SYSTEM_EXCEPTION, str(e)
-
- credential = dingtalk_stream.Credential(client_id=app_key, client_secret=app_secret)
- client = dingtalk_stream.DingTalkStreamClient(credential=credential)
- client.register_callback_handler(
- dingtalk_stream.chatbot.ChatbotMessage.TOPIC,
- ClawithChatbotHandler(),
- )
+ logger.warning("[DingTalk Stream] Main loop not available")
- logger.info(f"[DingTalk Stream] Connecting for agent {agent_id}...")
- # start_forever() blocks until disconnected
- client.start_forever()
+ else:
+ # Non-text message: process media in the main loop
+ from app.api.dingtalk import process_dingtalk_message
- except ImportError:
- logger.warning(
- "[DingTalk Stream] dingtalk-stream package not installed. "
- "Install with: pip install dingtalk-stream"
- )
+ if main_loop and main_loop.is_running():
+ # Add thinking reaction immediately
+ from app.services.dingtalk_reaction import add_thinking_reaction
+ _fire_and_forget(main_loop,
+ add_thinking_reaction(app_key, app_secret, message_id, conversation_id))
+ # Process media (download + encode) in the main loop
+ _fire_and_forget(main_loop,
+ self._handle_media_and_dispatch(
+ msg_data=msg_data,
+ app_key=app_key,
+ app_secret=app_secret,
+ agent_id=agent_id,
+ sender_staff_id=sender_staff_id,
+ conversation_id=conversation_id,
+ conversation_type=conversation_type,
+ session_webhook=session_webhook,
+ sender_nick=sender_nick,
+ message_id=message_id,
+ sender_id=sender_id,
+ ))
+ # Fire-and-forget: ACK immediately, do not wait for LLM
+ else:
+ logger.warning("[DingTalk Stream] Main loop not available")
+
+ return dingtalk_stream.AckMessage.STATUS_OK, "ok"
+ except Exception as e:
+ logger.error(f"[DingTalk Stream] Error in message handler: {e}")
+ import traceback
+ traceback.print_exc()
+ return dingtalk_stream.AckMessage.STATUS_SYSTEM_EXCEPTION, str(e)
+
+ @staticmethod
+ async def _handle_media_and_dispatch(
+ msg_data: dict,
+ app_key: str,
+ app_secret: str,
+ agent_id: uuid.UUID,
+ sender_staff_id: str,
+ conversation_id: str,
+ conversation_type: str,
+ session_webhook: str,
+ sender_nick: str = "",
+ message_id: str = "",
+ sender_id: str = "",
+ ):
+ """Download media, then dispatch to process_dingtalk_message."""
+ from app.api.dingtalk import process_dingtalk_message
+
+ user_text, image_base64_list, saved_file_paths = await _process_media_message(
+ msg_data=msg_data,
+ app_key=app_key,
+ app_secret=app_secret,
+ agent_id=agent_id,
+ )
+
+ if not user_text:
+ logger.info("[DingTalk Stream] Empty content after media processing, skipping")
+ return
+
+ await process_dingtalk_message(
+ agent_id=agent_id,
+ sender_staff_id=sender_staff_id,
+ user_text=user_text,
+ conversation_id=conversation_id,
+ conversation_type=conversation_type,
+ session_webhook=session_webhook,
+ image_base64_list=image_base64_list,
+ saved_file_paths=saved_file_paths,
+ sender_nick=sender_nick,
+ message_id=message_id,
+ sender_id=sender_id,
+ )
+
+ while not stop_event.is_set() and retries <= MAX_RETRIES:
+ try:
+ credential = dingtalk_stream.Credential(client_id=app_key, client_secret=app_secret)
+ client = dingtalk_stream.DingTalkStreamClient(credential=credential)
+ client.register_callback_handler(
+ dingtalk_stream.chatbot.ChatbotMessage.TOPIC,
+ ClawithChatbotHandler(),
+ )
+
+ logger.info(
+ f"[DingTalk Stream] Connecting for agent {agent_id}... "
+ f"(attempt {retries + 1}/{MAX_RETRIES + 1})"
+ )
+ retries = 0 # reset on successful connection
+ # start_forever() blocks until disconnected
+ client.start_forever()
+
+ # start_forever returned — connection dropped
+ if stop_event.is_set():
+ break # intentional stop, no retry
+
+ logger.warning(
+ f"[DingTalk Stream] Connection lost for agent {agent_id}, will retry..."
+ )
+
+ except ImportError:
+ logger.warning(
+ "[DingTalk Stream] dingtalk-stream package not installed. "
+ "Install with: pip install dingtalk-stream"
+ )
+ break # no point retrying without the package
+ except Exception as e:
+ retries += 1
+ logger.error(
+ f"[DingTalk Stream] Connection error for {agent_id} "
+ f"(attempt {retries}/{MAX_RETRIES + 1}): {e}"
+ )
+
+ if retries > MAX_RETRIES:
+ logger.error(
+ f"[DingTalk Stream] Agent {agent_id} exhausted all retries, giving up"
+ )
+ # Notify creator about permanent failure
+ if main_loop and main_loop.is_running():
+ asyncio.run_coroutine_threadsafe(
+ self._notify_connection_failed(agent_id, str(e)),
+ main_loop,
+ )
+ break
+
+ delay = RETRY_DELAYS[min(retries - 1, len(RETRY_DELAYS) - 1)]
+ logger.info(
+ f"[DingTalk Stream] Retrying in {delay}s for agent {agent_id}..."
+ )
+ # Use stop_event.wait so we exit immediately if stopped
+ if stop_event.wait(timeout=delay):
+ break # stop was requested during wait
+
+ self._threads.pop(agent_id, None)
+ self._stop_events.pop(agent_id, None)
+ logger.info(f"[DingTalk Stream] Client stopped for agent {agent_id}")
+
+ async def _notify_connection_failed(self, agent_id: uuid.UUID, error_msg: str):
+ """Send notification to agent creator when DingTalk connection permanently fails."""
+ try:
+ from app.models.agent import Agent
+ from app.services.notification_service import send_notification
+ async with async_session() as db:
+ result = await db.execute(select(Agent).where(Agent.id == agent_id))
+ agent = result.scalar_one_or_none()
+ if agent and agent.creator_id:
+ await send_notification(
+ db,
+ user_id=agent.creator_id,
+ agent_id=agent_id,
+ type="channel_error",
+ title=f"DingTalk connection failed for {agent.name}",
+ body=(
+ f"Failed to connect after multiple retries. "
+ f"Last error: {error_msg[:200]}. "
+ f"Please check your DingTalk app credentials and try reconfiguring the channel."
+ ),
+ link=f"/agents/{agent_id}#settings",
+ )
+ await db.commit()
except Exception as e:
- logger.error(f"[DingTalk Stream] Client error for {agent_id}: {e}")
- import traceback
- traceback.print_exc()
- finally:
- self._threads.pop(agent_id, None)
- self._stop_events.pop(agent_id, None)
- logger.info(f"[DingTalk Stream] Client stopped for agent {agent_id}")
+ logger.error(f"[DingTalk Stream] Failed to send connection failure notification: {e}")
async def stop_client(self, agent_id: uuid.UUID):
"""Stop a running Stream client for an agent."""
@@ -167,7 +714,10 @@ async def stop_client(self, agent_id: uuid.UUID):
stop_event.set()
thread = self._threads.pop(agent_id, None)
if thread and thread.is_alive():
- logger.info(f"[DingTalk Stream] Stopping client for agent {agent_id}")
+ logger.info(f"[DingTalk Stream] Stopping client for agent {agent_id}, waiting for thread...")
+ thread.join(timeout=5)
+ if thread.is_alive():
+ logger.warning(f"[DingTalk Stream] Thread for {agent_id} did not exit within 5s")
async def start_all(self):
"""Start Stream clients for all configured DingTalk agents."""
diff --git a/backend/app/services/dingtalk_token.py b/backend/app/services/dingtalk_token.py
new file mode 100644
index 00000000..9e88e277
--- /dev/null
+++ b/backend/app/services/dingtalk_token.py
@@ -0,0 +1,76 @@
+"""DingTalk access_token global cache manager.
+
+Caches tokens per app_key with auto-refresh before expiry.
+All DingTalk token acquisition should go through this manager.
+Tokens are stored in Redis (preferred) with in-memory fallback.
+"""
+
+import time
+import asyncio
+from typing import Dict, Optional
+from loguru import logger
+import httpx
+
+
+class DingTalkTokenManager:
+ """Global DingTalk access_token cache backed by Redis + memory fallback.
+
+ - Cache by app_key
+ - Token valid for 7200s, refresh 300s early (TTL=6900)
+ - Concurrency-safe with asyncio.Lock
+ """
+
+ def __init__(self):
+ self._locks: Dict[str, asyncio.Lock] = {}
+
+ def _get_lock(self, app_key: str) -> asyncio.Lock:
+ if app_key not in self._locks:
+ self._locks[app_key] = asyncio.Lock()
+ return self._locks[app_key]
+
+ def _cache_key(self, app_key: str) -> str:
+ return f"clawith:token:dingtalk_corp:{app_key}"
+
+ async def get_token(self, app_key: str, app_secret: str) -> Optional[str]:
+ """Get access_token, return cached if valid, refresh if expired."""
+ from app.core.token_cache import get_cached_token, set_cached_token
+
+ key = self._cache_key(app_key)
+
+ # Fast path: check cache without lock
+ cached = await get_cached_token(key)
+ if cached:
+ return cached
+
+ async with self._get_lock(app_key):
+ # Double-check after acquiring lock
+ cached = await get_cached_token(key)
+ if cached:
+ return cached
+
+ try:
+ async with httpx.AsyncClient(timeout=10) as client:
+ resp = await client.post(
+ "https://api.dingtalk.com/v1.0/oauth2/accessToken",
+ json={"appKey": app_key, "appSecret": app_secret},
+ )
+ data = resp.json()
+ token = data.get("accessToken")
+ expires_in = data.get("expireIn", 7200)
+
+ if token:
+ # TTL = expires_in - 300s (refresh 5 min early)
+ ttl = max(expires_in - 300, 60)
+ await set_cached_token(key, token, ttl)
+ logger.debug(f"[DingTalk Token] Refreshed for {app_key[:8]}..., expires in {expires_in}s, TTL={ttl}s")
+ return token
+
+ logger.error(f"[DingTalk Token] Failed to get token: {data}")
+ return None
+ except Exception as e:
+ logger.error(f"[DingTalk Token] Error getting token: {e}")
+ return None
+
+
+# Global singleton — import this instance so all callers share one cache/lock set.
+dingtalk_token_manager = DingTalkTokenManager()
diff --git a/backend/app/services/feishu_service.py b/backend/app/services/feishu_service.py
index 2a976413..a4080185 100644
--- a/backend/app/services/feishu_service.py
+++ b/backend/app/services/feishu_service.py
@@ -30,10 +30,24 @@ async def get_app_access_token(self) -> str:
return await self.get_tenant_access_token(self.app_id, self.app_secret)
async def get_tenant_access_token(self, app_id: str = None, app_secret: str = None) -> str:
- """Get or refresh the app-level access token (tenant_access_token)."""
+ """Get or refresh the app-level access token (tenant_access_token).
+
+ Cached in Redis (preferred) with in-memory fallback.
+ Key: clawith:token:feishu_tenant:{app_id}
+ TTL: 6900s (7200s validity - 5 min early refresh)
+ """
+ from app.core.token_cache import get_cached_token, set_cached_token
+
target_app_id = app_id or self.app_id
target_app_secret = app_secret or self.app_secret
-
+ cache_key = f"clawith:token:feishu_tenant:{target_app_id}"
+
+ cached = await get_cached_token(cache_key)
+ if cached:
+ if not app_id:
+ self._app_access_token = cached
+ return cached
+
async with httpx.AsyncClient() as client:
resp = await client.post(FEISHU_APP_TOKEN_URL, json={
"app_id": target_app_id,
@@ -42,8 +56,12 @@ async def get_tenant_access_token(self, app_id: str = None, app_secret: str = No
data = resp.json()
token = data.get("tenant_access_token") or data.get("app_access_token", "")
- if not app_id: # only cache default app token
- self._app_access_token = token
+ expire = data.get("expire", 7200)
+ if token:
+ ttl = max(expire - 300, 60)
+ await set_cached_token(cache_key, token, ttl)
+ if not app_id: # only update instance var for default app token
+ self._app_access_token = token
return token
diff --git a/backend/app/services/image_context.py b/backend/app/services/image_context.py
new file mode 100644
index 00000000..c4bf9eb6
--- /dev/null
+++ b/backend/app/services/image_context.py
@@ -0,0 +1,112 @@
+"""Re-hydrate image content from disk for LLM multi-turn context.
+
+Scans history messages for [file:xxx.jpg] patterns,
+reads the image file from agent workspace, and injects base64 data
+so the LLM can see images from previous turns.
+"""
+
+import base64
+import re
+from pathlib import Path
+from typing import Optional
+
+from loguru import logger
+from app.config import get_settings
+
+IMAGE_EXTENSIONS = {'.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp'}
+FILE_PATTERN = re.compile(r'\[file:([^\]]+)\]')
+IMAGE_DATA_PATTERN = re.compile(
+ r'\[image_data:data:image/[^;]+;base64,[A-Za-z0-9+/=]+\]'
+)
+MAX_IMAGE_BYTES = 5 * 1024 * 1024 # 5MB per image
+
+
def rehydrate_image_messages(
    messages: list[dict],
    agent_id,
    max_images: int = 3,
) -> list[dict]:
    """Scan history for [file:xxx.jpg] and inject base64 image data for LLM.

    Only processes the most recent `max_images` user image messages
    to limit context size and cost.

    Args:
        messages: List of {"role": ..., "content": ...} dicts
        agent_id: Agent UUID for resolving file paths
        max_images: Max number of historical images to re-hydrate

    Returns:
        New list with image messages enriched with base64 data.
        Non-image messages and messages with existing image_data are unchanged.
    """
    settings = get_settings()
    upload_dir = (
        Path(settings.AGENT_DATA_DIR) / str(agent_id) / "workspace" / "uploads"
    )

    # Find user messages with [file:xxx.jpg] (newest first, skip current turn)
    image_indices: list[tuple[int, str]] = []  # (index, filename)
    for i in range(len(messages) - 1, -1, -1):
        msg = messages[i]
        if msg.get("role") != "user":
            continue
        content = msg.get("content", "")
        if not isinstance(content, str):
            continue
        # Skip if already has image_data (current turn)
        if "[image_data:" in content:
            continue
        match = FILE_PATTERN.search(content)
        if not match:
            continue
        filename = match.group(1)
        ext = Path(filename).suffix.lower()
        if ext not in IMAGE_EXTENSIONS:
            continue
        image_indices.append((i, filename))
        if len(image_indices) >= max_images:
            break

    if not image_indices:
        return messages

    # Re-hydrate in-place (working on a copy)
    result = list(messages)
    rehydrated = 0

    for idx, filename in image_indices:
        file_path = upload_dir / filename
        if not file_path.exists():
            logger.warning(f"[ImageContext] File not found: {file_path}")
            continue
        try:
            img_bytes = file_path.read_bytes()
            if len(img_bytes) > MAX_IMAGE_BYTES:
                # Fix: log the actual filename instead of the literal
                # placeholder "(unknown)" that made this message useless.
                logger.info(
                    f"[ImageContext] Skipping large image: "
                    f"{filename} ({len(img_bytes)} bytes)"
                )
                continue

            b64 = base64.b64encode(img_bytes).decode("ascii")
            ext = file_path.suffix.lower().lstrip('.')
            mime = f"image/{'jpeg' if ext == 'jpg' else ext}"
            marker = f"[image_data:data:{mime};base64,{b64}]"

            # Append image_data marker to existing content
            old_content = result[idx]["content"]
            result[idx] = {**result[idx], "content": f"{old_content}\n{marker}"}
            rehydrated += 1
            # Fix: name the re-hydrated file in the debug log.
            logger.debug(f"[ImageContext] Re-hydrated: {filename}")

        except Exception as e:
            # Fix: report which file failed, not "(unknown)".
            logger.error(f"[ImageContext] Failed to read {filename}: {e}")

    if rehydrated > 0:
        logger.info(
            f"[ImageContext] Re-hydrated {rehydrated} image(s) "
            f"for agent {agent_id}"
        )

    return result
diff --git a/backend/app/services/org_sync_adapter.py b/backend/app/services/org_sync_adapter.py
index b317b358..71146145 100644
--- a/backend/app/services/org_sync_adapter.py
+++ b/backend/app/services/org_sync_adapter.py
@@ -764,12 +764,23 @@ def api_base_url(self) -> str:
return self.DINGTALK_API_URL
async def get_access_token(self) -> str:
- if self._access_token and self._token_expires_at and datetime.now() < self._token_expires_at:
- return self._access_token
+ """Get or refresh the DingTalk access token.
+
+ Cached in Redis (preferred) with in-memory fallback.
+ Key: clawith:token:dingtalk_corp:{app_key}
+ TTL: expires_in - 300s (5 min early refresh)
+ """
+ from app.core.token_cache import get_cached_token, set_cached_token
if not self.app_key or not self.app_secret:
raise ValueError("DingTalk app_key/app_secret missing in provider config")
+ cache_key = f"clawith:token:dingtalk_corp:{self.app_key}"
+ cached = await get_cached_token(cache_key)
+ if cached:
+ self._access_token = cached
+ return cached
+
async with httpx.AsyncClient() as client:
resp = await client.get(
self.DINGTALK_TOKEN_URL,
@@ -780,9 +791,11 @@ async def get_access_token(self) -> str:
raise RuntimeError(f"DingTalk token error: {data.get('errmsg') or data}")
token = data.get("access_token") or ""
expires_in = int(data.get("expires_in") or 7200)
- self._access_token = token
- # refresh a bit earlier
- self._token_expires_at = datetime.now() + timedelta(seconds=max(expires_in - 60, 60))
+ if token:
+ ttl = max(expires_in - 300, 60)
+ await set_cached_token(cache_key, token, ttl)
+ self._access_token = token
+ self._token_expires_at = datetime.now() + timedelta(seconds=max(expires_in - 60, 60))
return token
async def fetch_departments(self) -> list[ExternalDepartment]:
@@ -952,9 +965,23 @@ def api_base_url(self) -> str:
return self.WECOM_API_URL
async def get_access_token(self) -> str:
- """Get valid access token for WeCom API."""
- if self._access_token and self._token_expires_at and datetime.now() < self._token_expires_at:
- return self._access_token
+ """Get valid access token for WeCom API.
+
+ Cached in Redis (preferred) with in-memory fallback.
+ Key: clawith:token:wecom:{corp_id}
+ TTL: expires_in - 300s (5 min early refresh)
+ """
+ from app.core.token_cache import get_cached_token, set_cached_token
+
+ # Determine identifier for cache key
+ _corp_key = self.corp_id or self.bot_id or ""
+ cache_key = f"clawith:token:wecom:{_corp_key}"
+
+ if _corp_key:
+ cached = await get_cached_token(cache_key)
+ if cached:
+ self._access_token = cached
+ return cached
# Priority 1: Standard CorpID + Secret
if self.corp_id and self.secret:
@@ -967,8 +994,11 @@ async def get_access_token(self) -> str:
if data.get("errcode") == 0:
token = data.get("access_token") or ""
expires_in = int(data.get("expires_in") or 7200)
- self._access_token = token
- self._token_expires_at = datetime.now() + timedelta(seconds=max(expires_in - 300, 300))
+ if token:
+ ttl = max(expires_in - 300, 300)
+ await set_cached_token(cache_key, token, ttl)
+ self._access_token = token
+ self._token_expires_at = datetime.now() + timedelta(seconds=ttl)
return token
else:
logger.error(f"[WeCom Sync] Token error with corp_id: {data}")
@@ -984,8 +1014,11 @@ async def get_access_token(self) -> str:
if data.get("errcode") == 0:
token = data.get("access_token") or ""
expires_in = int(data.get("expires_in") or 7200)
- self._access_token = token
- self._token_expires_at = datetime.now() + timedelta(seconds=max(expires_in - 300, 300))
+ if token:
+ ttl = max(expires_in - 300, 300)
+ await set_cached_token(cache_key, token, ttl)
+ self._access_token = token
+ self._token_expires_at = datetime.now() + timedelta(seconds=ttl)
return token
raise ValueError("WeCom credentials (corp_id/secret or bot_id/secret) missing or invalid")
diff --git a/backend/app/services/platform_service.py b/backend/app/services/platform_service.py
index 7bdc61fa..d15e482d 100644
--- a/backend/app/services/platform_service.py
+++ b/backend/app/services/platform_service.py
@@ -31,12 +31,25 @@ async def get_public_base_url(self, db: AsyncSession | None = None, request: Req
if env_url:
return env_url.rstrip("/")
- # 2. Fallback to request (browser address)
+ # 2. Try database system_settings
+ if db:
+ try:
+ from app.models.system_settings import SystemSetting
+ result = await db.execute(
+ select(SystemSetting).where(SystemSetting.key == "platform")
+ )
+ setting = result.scalar_one_or_none()
+ if setting and setting.value and setting.value.get("public_base_url"):
+ return setting.value["public_base_url"].rstrip("/")
+ except Exception:
+ pass
+
+ # 3. Fallback to request (browser address)
if request:
# Note: request.base_url might include trailing slash
return str(request.base_url).rstrip("/")
- # 3. Absolute fallback
+ # 4. Absolute fallback
return "https://try.clawith.ai"
diff --git a/backend/app/services/sso_service.py b/backend/app/services/sso_service.py
index 296a0be3..f292ab49 100644
--- a/backend/app/services/sso_service.py
+++ b/backend/app/services/sso_service.py
@@ -37,10 +37,12 @@ async def match_user_by_email(
User if found, None otherwise
"""
# 1. Try direct match via Identity join
+ from sqlalchemy.orm import selectinload
query = (
select(User)
.join(User.identity)
.where(Identity.email == email)
+ .options(selectinload(User.identity))
)
if tenant_id:
query = query.where(User.tenant_id == tenant_id)
@@ -82,10 +84,12 @@ async def match_user_by_mobile(
return None
# 1. Try direct match via Identity join
+ from sqlalchemy.orm import selectinload as _sinload
query = (
select(User)
.join(User.identity)
.where(Identity.phone == normalized_mobile)
+ .options(_sinload(User.identity))
)
if tenant_id:
query = query.where(User.tenant_id == tenant_id)
diff --git a/backend/app/services/tool_seeder.py b/backend/app/services/tool_seeder.py
index 45bacdd7..6bb10c37 100644
--- a/backend/app/services/tool_seeder.py
+++ b/backend/app/services/tool_seeder.py
@@ -1350,6 +1350,34 @@
"config": {},
"config_schema": {},
},
+ {
+ "name": "sql_execute",
+ "display_name": "SQL Execute",
+ "description": "Connect to any SQL database and execute queries or statements. Supports MySQL, PostgreSQL, SQLite. Pass a standard connection URI and SQL statement.",
+ "category": "database",
+ "icon": "🗄️",
+ "is_default": False,
+ "parameters_schema": {
+ "type": "object",
+ "properties": {
+ "connection_string": {
+ "type": "string",
+ "description": "Database connection URI, e.g. mysql://user:pass@host:3306/db, postgresql://user:pass@host:5432/db, sqlite:///path/to/file.db",
+ },
+ "sql": {
+ "type": "string",
+ "description": "SQL statement to execute (SELECT, INSERT, UPDATE, DELETE, DDL, etc.)",
+ },
+ "timeout": {
+ "type": "integer",
+ "description": "Query timeout in seconds (default 30, max 120)",
+ },
+ },
+ "required": ["connection_string", "sql"],
+ },
+ "config": {},
+ "config_schema": {},
+ },
]
# ── AgentBay Tools ──────────────────────────────────────────────────────────
diff --git a/backend/app/services/trigger_daemon.py b/backend/app/services/trigger_daemon.py
index be052566..4853191f 100644
--- a/backend/app/services/trigger_daemon.py
+++ b/backend/app/services/trigger_daemon.py
@@ -478,11 +478,12 @@ async def on_tool_call(data):
try:
async with async_session() as _tc_db:
if data["status"] == "running":
+ from app.utils.sanitize import sanitize_tool_args
_tc_db.add(ChatMessage(
agent_id=agent_id,
conversation_id=str(session_id),
role="tool_call",
- content=_json.dumps({"name": data["name"], "args": data["args"]}, ensure_ascii=False, default=str),
+ content=_json.dumps({"name": data["name"], "args": sanitize_tool_args(data.get("args"))}, ensure_ascii=False, default=str),
user_id=agent.creator_id,
participant_id=agent_participant_id,
))
diff --git a/backend/app/services/wecom_stream.py b/backend/app/services/wecom_stream.py
index 28dafc1e..3e041e54 100644
--- a/backend/app/services/wecom_stream.py
+++ b/backend/app/services/wecom_stream.py
@@ -300,7 +300,9 @@ async def _process_wecom_stream_message(
# Find or create platform user
wc_username = f"wecom_{sender_id}"
- query = _select(UserModel).where(UserModel.username == wc_username)
+ from app.models.user import Identity as _IdentityModel
+ from sqlalchemy.orm import selectinload as _selectinload
+ query = _select(UserModel).join(UserModel.identity).where(_IdentityModel.username == wc_username).options(_selectinload(UserModel.identity))
if agent and agent.tenant_id:
query = query.where(UserModel.tenant_id == agent.tenant_id)
@@ -308,14 +310,20 @@ async def _process_wecom_stream_message(
platform_user = u_r.scalar_one_or_none()
if not platform_user:
- platform_user = UserModel(
- username=wc_username,
+ from app.services.registration_service import registration_service as _reg_svc
+ _wc_identity = await _reg_svc.find_or_create_identity(
+ db,
email=f"{wc_username}@wecom.local",
- password_hash=hash_password(str(_uuid.uuid4())),
+ username=wc_username,
+ password=str(_uuid.uuid4()),
+ )
+ platform_user = UserModel(
+ identity_id=_wc_identity.id,
display_name=f"WeCom {sender_id[:8]}",
role="member",
tenant_id=agent.tenant_id if agent else None,
registration_source="wecom",
+ is_active=True,
)
db.add(platform_user)
diff --git a/backend/app/utils/__init__.py b/backend/app/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/backend/app/utils/sanitize.py b/backend/app/utils/sanitize.py
new file mode 100644
index 00000000..c91910d1
--- /dev/null
+++ b/backend/app/utils/sanitize.py
@@ -0,0 +1,84 @@
+"""Sanitize sensitive fields from tool call arguments before sending to clients."""
+
+import re
+from copy import deepcopy
+from urllib.parse import urlparse, urlunparse
+
+# Field names whose values should be completely hidden (replaced with "******")
+SENSITIVE_FIELD_NAMES = {
+ "password", "secret", "token", "api_key", "apikey", "api_secret",
+ "access_token", "refresh_token", "private_key", "secret_key",
+ "authorization", "credentials", "auth",
+ # Connection/credential strings — hide entirely, not partially
+ "connection_string", "database_url", "db_url", "dsn", "uri",
+ "connection_uri", "jdbc_url", "mongo_uri", "redis_url",
+}
+
+
+def sanitize_tool_args(args: dict | None) -> dict | None:
+ """Return a sanitized copy of tool call arguments.
+
+ - Fields matching SENSITIVE_FIELD_NAMES are replaced with "******"
+ - Values that look like connection URIs are also replaced with "******"
+ - Original dict is NOT modified (returns a deep copy)
+ """
+ if not args:
+ return args
+
+ sanitized = deepcopy(args)
+
+ for key in list(sanitized.keys()):
+ key_lower = key.lower()
+
+ # Fully mask sensitive fields by name
+ if key_lower in SENSITIVE_FIELD_NAMES:
+ sanitized[key] = "******"
+ continue
+
+ # Fully mask values that look like connection URIs regardless of field name
+ if isinstance(sanitized[key], str) and _looks_like_connection_uri(sanitized[key]):
+ sanitized[key] = "******"
+
+ # Special case: hide content when writing to secrets.md
+ path_val = sanitized.get("path", "") or ""
+ if _is_secrets_file_path(path_val):
+ if "content" in sanitized:
+ sanitized["content"] = "******"
+
+ return sanitized
+
+
+def _is_secrets_file_path(path: str) -> bool:
+ """Check if a path references secrets.md."""
+ normalized = path.strip("/")
+ return normalized == "secrets.md" or normalized.endswith("/secrets.md")
+
+
+def _mask_uri_password(uri: str) -> str:
+ """Mask the password portion of a connection URI.
+
+ mysql://user:secret123@host:3306/db -> mysql://user:******@host:3306/db
+ """
+ try:
+ parsed = urlparse(uri)
+ if parsed.password:
+ # Reconstruct with masked password
+ netloc = parsed.hostname or ""
+ if parsed.port:
+ netloc = f"{netloc}:{parsed.port}"
+ if parsed.username:
+ netloc = f"{parsed.username}:******@{netloc}"
+ return urlunparse((parsed.scheme, netloc, parsed.path, parsed.params, parsed.query, parsed.fragment))
+ except Exception:
+ pass
+
+ # Fallback: regex-based masking for non-standard URIs
+ return re.sub(r'(://[^:]+:)[^@]+(@)', r'\1******\2', uri)
+
+
+def _looks_like_connection_uri(value: str) -> bool:
+ """Check if a string value looks like a database connection URI."""
+ prefixes = ("mysql://", "postgresql://", "postgres://", "sqlite://",
+ "mongodb://", "redis://", "mssql://", "oracle://",
+ "mysql+", "postgresql+", "postgres+")
+ return any(value.lower().startswith(p) for p in prefixes)
diff --git a/backend/pyproject.toml b/backend/pyproject.toml
index ba663932..e26166e2 100644
--- a/backend/pyproject.toml
+++ b/backend/pyproject.toml
@@ -8,6 +8,8 @@ dependencies = [
"uvicorn[standard]>=0.30.0",
"sqlalchemy[asyncio]>=2.0.0",
"asyncpg>=0.30.0",
+ "aiomysql>=0.2.0",
+ "aiosqlite>=0.20.0",
"alembic>=1.14.0",
"redis[hiredis]>=5.0.0",
"pydantic>=2.0.0",
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index 7316ed86..9cdd72c4 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -12,6 +12,7 @@
"@tanstack/react-query": "^5.0.0",
"i18next": "^24.0.0",
"i18next-browser-languagedetector": "^8.2.1",
+ "lucide-react": "^1.7.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-i18next": "^15.0.0",
@@ -1907,6 +1908,15 @@
"yallist": "^3.0.2"
}
},
+ "node_modules/lucide-react": {
+ "version": "1.7.0",
+ "resolved": "https://registry.npmmirror.com/lucide-react/-/lucide-react-1.7.0.tgz",
+ "integrity": "sha512-yI7BeItCLZJTXikmK4KNUGCKoGzSvbKlfCvw44bU4fXAL6v3gYS4uHD1jzsLkfwODYwI6Drw5Tu9Z5ulDe0TSg==",
+ "license": "ISC",
+ "peerDependencies": {
+ "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0"
+ }
+ },
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
diff --git a/frontend/package.json b/frontend/package.json
index 887cdde2..58ce0ca2 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -13,6 +13,7 @@
"@tanstack/react-query": "^5.0.0",
"i18next": "^24.0.0",
"i18next-browser-languagedetector": "^8.2.1",
+ "lucide-react": "^1.7.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-i18next": "^15.0.0",
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index 06b92c39..ddc1a203 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -2,6 +2,7 @@ import { Routes, Route, Navigate } from 'react-router-dom';
import { useAuthStore } from './stores';
import { useEffect, useState, useRef } from 'react';
import { authApi } from './services/api';
+import { X } from 'lucide-react';
import Login from './pages/Login';
import ForgotPassword from './pages/ForgotPassword';
import ResetPassword from './pages/ResetPassword';
diff --git a/frontend/src/components/ChannelConfig.tsx b/frontend/src/components/ChannelConfig.tsx
index 1d3e04b4..44955ef1 100644
--- a/frontend/src/components/ChannelConfig.tsx
+++ b/frontend/src/components/ChannelConfig.tsx
@@ -2,6 +2,8 @@ import { useState, type ReactNode } from 'react';
import { useTranslation } from 'react-i18next';
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import { channelApi } from '../services/api';
+import { Cloud } from 'lucide-react';
+import { copyToClipboard } from '../utils/clipboard';
import LinearCopyButton from './LinearCopyButton';
// ─── Shared fetchAuth (same as AgentDetail) ─────────────
function fetchAuth(url: string, options?: RequestInit): Promise {
@@ -80,7 +82,7 @@ const DingTalkIcon = ;
-const AgentBayIcon = 🌩️ ;
+const AgentBayIcon = ;
// Eye icons for password toggle
const EyeOpen = ;
diff --git a/frontend/src/components/ErrorBoundary.tsx b/frontend/src/components/ErrorBoundary.tsx
index 4813f474..aa715d07 100644
--- a/frontend/src/components/ErrorBoundary.tsx
+++ b/frontend/src/components/ErrorBoundary.tsx
@@ -1,4 +1,5 @@
import React, { Component, ErrorInfo, ReactNode } from 'react';
+import { AlertTriangle } from 'lucide-react';
interface Props {
children?: ReactNode;
@@ -32,7 +33,7 @@ class ErrorBoundary extends Component {
return (
- ⚠️ Oops, something went wrong.
+ Oops, something went wrong.
An unexpected error occurred. You can try refreshing the page or contact support if the problem persists.
diff --git a/frontend/src/components/FileBrowser.tsx b/frontend/src/components/FileBrowser.tsx
index 9d852d7a..363d79c2 100644
--- a/frontend/src/components/FileBrowser.tsx
+++ b/frontend/src/components/FileBrowser.tsx
@@ -6,6 +6,7 @@
*/
import { useState, useEffect, useCallback, useRef } from 'react';
import { useTranslation } from 'react-i18next';
+import { Folder, Pencil, Download, Upload } from 'lucide-react';
import MarkdownRenderer from './MarkdownRenderer';
// ─── Types ─────────────────────────────────────────────
@@ -262,7 +263,7 @@ export default function FileBrowser({
style={{ cursor: 'pointer', color: 'var(--accent-primary)', fontWeight: 500 }}
onClick={() => { setCurrentPath(rootPath); setViewing(null); setEditing(false); }}
>
- 📁 {rootPath || 'root'}
+ {rootPath || 'root'}
{pathParts.slice(rootPath ? rootPath.split('/').filter(Boolean).length : 0).map((part, i) => {
const upTo = pathParts.slice(0, (rootPath ? rootPath.split('/').filter(Boolean).length : 0) + i + 1).join('/');
@@ -401,7 +402,7 @@ export default function FileBrowser({
{isText && edit && (
!editing ? (
{ setEditContent(content); setEditing(true); }}>✏️ {t('agent.soul.editButton')}
+ onClick={() => { setEditContent(content); setEditing(true); }}> {t('agent.soul.editButton')}
) : (
@@ -475,12 +476,12 @@ export default function FileBrowser({
{renderBreadcrumbs()}
{upload && api.upload && (
-
⬆ Upload
+
Upload
)}
{newFolder && (
setPromptModal({ title: t('agent.workspace.newFolder'), placeholder: t('agent.workspace.newFolderName'), action: 'newFolder' })}>
- 📁 {t('agent.workspace.newFolder')}
+ {t('agent.workspace.newFolder')}
)}
{newFile && !fileFilter && (
@@ -504,7 +505,7 @@ export default function FileBrowser({
) : uploadProgress ? (
- ⬆
+
{uploadProgress.fileName}
{uploadProgress.percent}%
@@ -555,7 +556,7 @@ export default function FileBrowser({
onClick={(e) => e.stopPropagation()}
title={t('common.download', 'Download')}
style={{ padding: '2px 6px', fontSize: '11px', color: 'var(--accent-primary)', textDecoration: 'none', borderRadius: '4px' }}>
- ⬇
+
)}
{canDelete && (
diff --git a/frontend/src/i18n/en.json b/frontend/src/i18n/en.json
index 45d00e48..bd15a229 100644
--- a/frontend/src/i18n/en.json
+++ b/frontend/src/i18n/en.json
@@ -187,12 +187,12 @@
"creating": "Creating",
"error": "Error",
"disconnected": "Disconnected",
- "24hActions": "⏰ 24h Actions",
+ "24hActions": "24h Actions",
"24hActionsTooltip": "Total recorded operations in the past 24 hours, including chats, tool calls, task executions, etc.",
- "llmCallsToday": "🤖 LLM Calls Today",
+ "llmCallsToday": "LLM Calls Today",
"max": "Max",
- "totalToken": "📊 Total Token",
- "pending": "⏳ Pending"
+ "totalToken": "Total Token",
+ "pending": "Pending"
},
"tabs": {
"status": "Status",
@@ -210,26 +210,26 @@
},
"fields": {
"name": "Name",
- "role": "👤 Role",
+ "role": "Role",
"avatar": "Avatar",
"personality": "Personality",
"boundaries": "Boundaries",
"lastActive": "Last Active",
"tasksInProgress": "In Progress",
"supervisions": "Supervisions",
- "createdBy": "👨💼 Created by"
+ "createdBy": "Created by"
},
"profile": {
- "title": "📋 Agent Profile",
- "created": "📅 Created",
- "lastActive": "⏰ Last Active",
- "timezone": "🌐 Timezone"
+ "title": "Agent Profile",
+ "created": "Created",
+ "lastActive": "Last Active",
+ "timezone": "Timezone"
},
"modelConfig": {
- "title": "🤖 Model Config",
- "model": "🧠 Model",
- "provider": "🏢 Provider",
- "contextRounds": "🔄 Context Rounds"
+ "title": "Model Config",
+ "model": "Model",
+ "provider": "Provider",
+ "contextRounds": "Context Rounds"
},
"actions": {
"start": "Start",
@@ -326,7 +326,9 @@
"soulDesc": "Core identity, personality, and behavior boundaries.",
"memoryDesc": "Persistent memory accumulated through conversations and experiences.",
"heartbeatDesc": "Instructions for periodic awareness checks. The agent reads this file during each heartbeat.",
- "heartbeatTitle": "Heartbeat"
+ "heartbeatTitle": "Heartbeat",
+ "secretsTitle": "Secrets",
+ "secretsDesc": "Sensitive credentials (passwords, API keys, connection strings). Only visible to the agent creator."
},
"skills": {
"title": "Skill Definitions",
@@ -720,7 +722,7 @@
"delete": "Delete",
"edit": "Edit",
"cancel": "Cancel",
- "supportsVision": "👁 Supports Vision (Multimodal)",
+ "supportsVision": "Supports Vision (Multimodal)",
"supportsVisionDesc": "— Enable for models that can analyze images (GPT-4o, Claude, Qwen-VL, etc.)",
"test": "Test",
"testing": "Testing...",
@@ -833,7 +835,11 @@
"step4": "Paste CorpID and Secret below → click Save",
"step5": "If you need SSO via WeCom: Turn on the 'SSO Login' switch below, then in WeCom configure: App Management → Webpage Authorization → Set Trusted Domain to your URL domain. Share the 'SSO Login URL' with your team."
}
- }
+ },
+ "companyUrl": "Company URL",
+ "memberEmail": "Email",
+ "memberPhone": "Phone",
+ "memberSource": "Source"
},
"config": {
"title": "Platform Configuration",
@@ -885,7 +891,7 @@
"title": "Notification Bar",
"enabled": "Enable notification bar",
"text": "Notification text",
- "textPlaceholder": "e.g. 🎉 v2.1 released with new features!",
+ "textPlaceholder": "e.g. v2.1 released with new features!",
"description": "Display a notification bar at the top of the page, visible to all users.",
"preview": "Preview"
},
@@ -904,6 +910,56 @@
"oauth2": "OAuth2: Enter App ID, App Secret, Authorize URL, Token URL, User Info URL",
"saml": "SAML: Requires entry_point, issuer, cert (public key).",
"microsoft_teams": "Microsoft Teams: Requires client_id, client_secret, tenant_id."
+ },
+ "description": "Configure enterprise directory synchronization and Identity Provider settings.",
+ "statusActive": "Active",
+ "statusNotConfigured": "Not configured",
+ "syncDirectory": "Sync Directory",
+ "syncSyncing": "Syncing...",
+ "syncComplete": "Sync complete: {{users_created}} users created, {{profiles_synced}} profiles synced.",
+ "syncError": "Error: {{error}}",
+ "deleteConfirmProvider": "Are you sure you want to delete this configuration?",
+ "savedStatus": "Saved",
+ "feishuDesc": "Feishu / Lark Integration",
+ "dingtalkDesc": "DingTalk App Integration",
+ "wecomDesc": "WeChat Work Integration",
+ "oauth2Desc": "Generic OIDC Provider",
+ "ssoLoginToggle": "SSO Login",
+ "ssoLoginToggleHint": "Allow users to log in via this identity provider.",
+ "ssoSubdomain": "SSO Login URL",
+ "ssoSubdomainHint": "Share this URL with your team. SSO login buttons will appear when they visit this address.",
+ "callbackUrl": "Redirect URL (paste this in your app settings)",
+ "callbackUrlHint": "Add this URL as the OAuth redirect URI in your identity provider's app configuration.",
+ "saveFirst": "Please save the configuration first to enable SSO.",
+ "oauth2": "OAuth2",
+ "clientId": "Client ID",
+ "clientSecret": "Client Secret",
+ "authorizeUrl": "Authorize URL",
+ "tokenUrl": "Token URL",
+ "userInfoUrl": "UserInfo URL",
+ "scope": "Scope",
+ "fieldMapping": "Field Mapping",
+ "fieldMappingHint": "Optional, leave empty to use standard OIDC fields",
+ "userIdField": "User ID Field",
+ "nameField": "Name Field",
+ "emailField": "Email Field",
+ "mobileField": "Mobile Field",
+ "clearField": "Clear this field",
+ "clearAllMappings": "Clear All Field Mappings",
+ "hasCustomMapping": "Custom field mapping configured",
+ "authorizeUrlPlaceholder": "https://sso.example.com/oauth2/authorize",
+ "tokenUrlPlaceholder": "Leave empty to auto-derive from Authorize URL",
+ "userInfoUrlPlaceholder": "Leave empty to auto-derive from Authorize URL",
+ "scopePlaceholder": "openid profile email",
+ "userIdFieldPlaceholder": "Default: sub",
+ "nameFieldPlaceholder": "Default: name",
+ "emailFieldPlaceholder": "Default: email",
+ "mobileFieldPlaceholder": "Default: phone_number",
+ "validation": {
+ "clientIdRequired": "Client ID is required",
+ "clientSecretRequired": "Client Secret is required",
+ "authorizeUrlRequired": "Authorize URL is required",
+ "invalidUrl": "Invalid URL format"
}
}
},
@@ -953,14 +1009,14 @@
"setupGuide": "Setup Guide",
"feishuPermJson": "Permission JSON (bulk import)",
"feishuPermCopy": "Copy",
- "feishuPermCopied": "✓ Copied",
+ "feishuPermCopied": "Copied",
"slack": {
"step1": "Go to api.slack.com/apps → click Create New App → choose From Scratch → enter an app name and pick your workspace → click Create App",
"step2": "In the left sidebar, click OAuth & Permissions → scroll down to Scopes → under Bot Token Scopes click Add an OAuth Scope → add: chat:write, im:history, app_mentions:read",
"step3": "Scroll back to the top of OAuth & Permissions → click Install to Workspace → click Allow → copy the Bot User OAuth Token (starts with xoxb-)",
"step4": "Left sidebar → Basic Information → scroll to App Credentials → click Show next to Signing Secret → copy it",
"step5": "Paste the Bot Token and Signing Secret above → click Save Config → copy the Webhook URL that appears",
- "step6": "Left sidebar → Event Subscriptions → toggle Enable Events ON → paste the Webhook URL into the Request URL field (it will say Verified ✓) → under Subscribe to bot events, click Add Bot User Event and add: app_mention, message.im → click Save Changes",
+ "step6": "Left sidebar → Event Subscriptions → toggle Enable Events ON → paste the Webhook URL into the Request URL field (it will say Verified ) → under Subscribe to bot events, click Add Bot User Event and add: app_mention, message.im → click Save Changes",
"step7": "A yellow banner will appear at the top: click reinstall your app and authorize again",
"step8": "Now open a DM with your bot in Slack and send a message to test it",
"note": "Your server must be publicly accessible on the internet so Slack can reach the Webhook URL."
@@ -1036,5 +1092,78 @@
"ws_step6": "Find the bot in WeCom and send a test message",
"ws_note": "WebSocket mode requires no public IP, callback URL, or domain verification (ICP). The connection is managed automatically."
}
+ },
+ "admin": {
+ "platformSettings": "Platform Settings",
+ "platformSettingsDesc": "Manage platform-wide settings and company tenants.",
+ "tab": {
+ "dashboard": "Dashboard",
+ "platform": "Platform",
+ "companies": "Companies"
+ },
+ "allowSelfCreate": "Allow users to create their own companies",
+ "allowSelfCreateDesc": "When disabled, only platform admins can create companies.",
+ "publicUrl": {
+ "title": "Public URL",
+ "desc": "The external URL used for webhook callbacks and published page links. Include the protocol (e.g. https://example.com)."
+ },
+ "company": "Company",
+ "orgAdmin": "Admin Email",
+ "users": "Users",
+ "agents": "Agents",
+ "tokens": "Token Usage",
+ "createdAt": "Created",
+ "status": "Status",
+ "action": "Action",
+ "all": "All",
+ "active": "Active",
+ "disabled": "Disabled",
+ "filterStatus": "Filter by status",
+ "createCompany": "Create Company",
+ "companyNamePlaceholder": "Company name",
+ "companyCreated": "Company Created",
+ "companyCreatedDesc": "has been created successfully.",
+ "inviteCodeLabel": "Admin Invitation Code",
+ "inviteCodeHowTo": "How to use this code:",
+ "inviteCodeExplain": "Send this code to the person who will manage this company. They should register a new account on the platform, then enter this code to join. The first person to use it will automatically become the Org Admin of this company. This code is single-use.",
+ "copyCode": "Copy Code",
+ "copied": "Copied",
+ "cannotDisableDefault": "Cannot disable the default company — platform admin would be locked out",
+ "confirmDisable": "Disable this company? All users and agents will be paused.",
+ "disable": "Disable",
+ "enable": "Enable",
+ "edit": "Edit",
+ "editCompany": "Edit Company",
+ "showing": "{{start}}-{{end}} of {{total}}",
+ "prev": "Prev",
+ "next": "Next",
+ "noFilterResults": "No companies match the current filter.",
+ "domainConfig": "Domain Configuration",
+ "domainConfigDesc": "Configure a dedicated access domain for this company.",
+ "subdomainPrefix": "Subdomain Prefix",
+ "subdomainPrefixPlaceholderHint": "Leave blank to use slug",
+ "prefixAvailable": "Available",
+ "prefixTaken": "Already taken",
+ "prefixChecking": "checking...",
+ "default": "Default",
+ "setAsDefault": "Default Company",
+ "setAsDefaultDesc": "Users visiting the global domain will be associated with this company.",
+ "currentDefault": "Current Default",
+ "prefixTakenError": "The subdomain prefix is already taken. Please choose a different one.",
+ "delete": "Delete",
+ "confirmDelete": "Yes, Delete",
+ "deleteCompany": "Delete Company",
+ "deleteCompanyWarning": "This action is irreversible. The following data will be permanently deleted:",
+ "deleteDataUsers": "All users in this company",
+ "deleteDataAgents": "All agents and their conversations",
+ "deleteDataSkills": "All skills and LLM model configurations",
+ "deleteDataOther": "All invitation codes, org structure, token records",
+ "deleteCompanyName": "Company"
+ },
+ "users": {
+ "newPassword": "New Password",
+ "newPasswordPlaceholder": "Leave blank to keep current password",
+ "newPasswordHint": "Optional. If provided, user password will be reset.",
+ "passwordResetSuccess": "Password has been reset"
}
-}
\ No newline at end of file
+}
diff --git a/frontend/src/i18n/zh.json b/frontend/src/i18n/zh.json
index 4941577e..0fe882f1 100644
--- a/frontend/src/i18n/zh.json
+++ b/frontend/src/i18n/zh.json
@@ -194,12 +194,12 @@
"creating": "创建中",
"error": "异常",
"disconnected": "已断开",
- "24hActions": "⏰ 24h 活动",
+ "24hActions": "24h 活动",
"24hActionsTooltip": "过去 24 小时内该 Agent 的所有操作记录,包括对话、工具调用、任务执行等",
- "llmCallsToday": "🤖 今日 LLM 调用",
+ "llmCallsToday": "今日 LLM 调用",
"max": "上限",
- "totalToken": "📊 总 Token",
- "pending": "⏳ 待处理"
+ "totalToken": "总 Token",
+ "pending": "待处理"
},
"tabs": {
"status": "状态",
@@ -217,26 +217,26 @@
},
"fields": {
"name": "名称",
- "role": "👤 角色",
+ "role": "角色",
"avatar": "头像",
"personality": "人格设定",
"boundaries": "行为边界",
"lastActive": "最后活跃",
"tasksInProgress": "进行中",
"supervisions": "督办",
- "createdBy": "👨💼 创建者"
+ "createdBy": "创建者"
},
"profile": {
- "title": "📋 Agent 档案",
- "created": "📅 创建时间",
- "lastActive": "⏰ 最后活跃",
- "timezone": "🌐 时区"
+ "title": "Agent 档案",
+ "created": "创建时间",
+ "lastActive": "最后活跃",
+ "timezone": "时区"
},
"modelConfig": {
- "title": "🤖 模型配置",
- "model": "🧠 模型",
- "provider": "🏢 提供商",
- "contextRounds": "🔄 上下文轮次"
+ "title": "模型配置",
+ "model": "模型",
+ "provider": "提供商",
+ "contextRounds": "上下文轮次"
},
"actions": {
"start": "启动",
@@ -333,9 +333,10 @@
"soulDesc": "核心身份、人格和行为边界。",
"memoryDesc": "通过对话和经验积累的持久记忆。",
"heartbeatDesc": "定期感知检查的指令。Agent 在每次心跳时读取此文件。",
- "heartbeatTitle": "心跳"
+ "heartbeatTitle": "心跳",
+ "secretsTitle": "机密信息",
+ "secretsDesc": "敏感凭据(密码、API 密钥、连接字符串等)。仅创建者可查看。"
},
-
"skills": {
"title": "技能定义",
"description": "技能定义了数字员工在特定场景下的行为方式。每个.md文件是一个技能。建议使用YAML frontmatter(name + description)来定义技能元数据。",
@@ -478,7 +479,7 @@
"saving": "保存中..."
},
"timezone": {
- "title": "🌐 时区",
+ "title": "时区",
"description": "此数字员工的调度、活跃时间和时间感知使用的时区。如果未设置,默认为公司时区。",
"current": "数字员工时区",
"override": "此数字员工的自定义时区",
@@ -762,7 +763,6 @@
"disable": "禁用",
"exportCsv": "导出 CSV"
},
-
"stats": {
"users": "{{count}} 用户",
"runningAgents": "{{running}}/{{total}} 运行中"
@@ -791,7 +791,7 @@
"delete": "删除",
"edit": "编辑",
"cancel": "取消",
- "supportsVision": "👁 支持视觉(多模态)",
+ "supportsVision": "支持视觉(多模态)",
"supportsVisionDesc": "— 为能够分析图像的模型启用(GPT-4o、Claude、Qwen-VL 等)",
"test": "测试",
"testing": "测试中...",
@@ -904,7 +904,11 @@
"step4": "将 CorpID 和 Secret 填入下方 → 点击保存配置",
"step5": "如需使用企微免登:打开下方的SSO登录开关,然后在企微中配置:应用管理 → 网页授权及JS-SDK → 设置可信域名(即下方 Redirect URL 的域名部分)。分享下方的 SSO Login URL 给团队成员即可登录。"
}
- }
+ },
+ "companyUrl": "企业访问地址",
+ "memberEmail": "邮箱",
+ "memberPhone": "手机",
+ "memberSource": "来源"
},
"config": {
"title": "平台配置",
@@ -967,9 +971,62 @@
"title": "顶部通知条",
"enabled": "启用通知条",
"text": "通知文案",
- "textPlaceholder": "输入通知条文案,如:🎉 v2.1 已发布!",
+ "textPlaceholder": "输入通知条文案,如: v2.1 已发布!",
"description": "在页面顶部显示一行通知条,所有用户(含未登录用户)可见。",
"preview": "预览"
+ },
+ "identity": {
+ "title": "组织架构同步",
+ "description": "配置企业目录同步和身份提供商设置。",
+ "statusActive": "已配置",
+ "statusNotConfigured": "未配置",
+ "syncDirectory": "同步通讯录",
+ "syncSyncing": "同步中...",
+ "syncComplete": "同步完成:创建 {{users_created}} 个用户,同步 {{profiles_synced}} 个档案。",
+ "syncError": "错误:{{error}}",
+ "deleteConfirmProvider": "确定要删除此配置吗?",
+ "savedStatus": "已保存",
+ "feishuDesc": "飞书集成",
+ "dingtalkDesc": "钉钉集成",
+ "wecomDesc": "企业微信集成",
+ "oauth2Desc": "通用 OIDC 提供商",
+ "ssoLoginToggle": "SSO 登录",
+ "ssoLoginToggleHint": "允许用户通过此身份提供商登录。",
+ "ssoSubdomain": "SSO 登录地址",
+ "ssoSubdomainHint": "将此地址分享给团队成员,访问后会出现 SSO 登录按钮。",
+ "callbackUrl": "回调地址(粘贴到应用设置中)",
+ "callbackUrlHint": "将此 URL 添加为身份提供商应用配置中的 OAuth 重定向 URI。",
+ "saveFirst": "请先保存配置后再启用 SSO。",
+ "oauth2": "OAuth2",
+ "clientId": "Client ID",
+ "clientSecret": "Client Secret",
+ "authorizeUrl": "授权地址",
+ "tokenUrl": "Token 地址",
+ "userInfoUrl": "用户信息地址",
+ "scope": "Scope",
+ "fieldMapping": "字段映射",
+ "fieldMappingHint": "可选,留空使用标准 OIDC 字段",
+ "userIdField": "用户 ID 字段",
+ "nameField": "姓名字段",
+ "emailField": "邮箱字段",
+ "mobileField": "手机号字段",
+ "clearField": "清空此字段",
+ "clearAllMappings": "清空所有字段映射",
+ "hasCustomMapping": "当前已配置自定义字段映射",
+ "authorizeUrlPlaceholder": "https://sso.example.com/oauth2/authorize",
+ "tokenUrlPlaceholder": "留空则从授权地址自动推导",
+ "userInfoUrlPlaceholder": "留空则从授权地址自动推导",
+ "scopePlaceholder": "openid profile email",
+ "userIdFieldPlaceholder": "默认:sub",
+ "nameFieldPlaceholder": "默认:name",
+ "emailFieldPlaceholder": "默认:email",
+ "mobileFieldPlaceholder": "默认:phone_number",
+ "validation": {
+ "clientIdRequired": "Client ID 是必填项",
+ "clientSecretRequired": "Client Secret 是必填项",
+ "authorizeUrlRequired": "授权地址是必填项",
+ "invalidUrl": "URL 格式不正确"
+ }
}
},
"common": {
@@ -1013,21 +1070,21 @@
"inviteCodeRequired": "注册需要邀请码",
"inviteCodeRequiredDesc": "开启后,新用户必须提供有效的邀请码才能注册。",
"companyNamePlaceholder": "公司名称",
- "companyCreated": "公司创建成功",
+ "companyCreated": "公司已创建",
"companyCreatedDesc": "已成功创建。",
"adminInviteCode": "管理员邀请码(仅限一次使用,首位使用者成为公司管理员):",
"inviteCodeLabel": "管理员邀请码",
- "inviteCodeHowTo": "如何使用此邀请码:",
- "inviteCodeExplain": "将此邀请码发送给负责管理该公司的人员。他们需要在平台上注册一个新账号,然后输入此邀请码加入公司。首位使用此邀请码的用户将自动成为该公司的管理员。此邀请码仅限一次使用。",
+ "inviteCodeHowTo": "如何使用此代码:",
+ "inviteCodeExplain": "将此代码发送给将管理该公司的人员。他们应在平台上注册新账号,然后输入此代码加入。第一个使用它的人将自动成为该公司的组织管理员。此代码为一次性使用。",
"copyCode": "复制邀请码",
"copied": "已复制",
- "cannotDisableDefault": "无法停用默认公司 -- 平台管理员将无法登录",
+ "cannotDisableDefault": "无法禁用默认公司 — 平台管理员将被锁定",
"company": "公司",
- "users": "用户数",
+ "users": "用户",
"agents": "数字员工",
- "tokens": "Token 用量",
+ "tokens": "Token 消耗",
"status": "状态",
- "active": "正常",
+ "active": "已启用",
"disabled": "已停用",
"disable": "停用",
"editCompany": "编辑公司",
@@ -1037,10 +1094,49 @@
"ssoDomain": "专属访问域名",
"ssoDomainPlaceholder": "例如: acme.clawith.com",
"confirmDisable": "停用此公司?所有用户和数字员工将被暂停。",
+ "createdAt": "创建时间",
+ "enable": "启用",
"showing": "{{start}}-{{end}} / {{total}}",
"prev": "上一页",
- "next": "下一页"
+ "next": "下一页",
+ "action": "操作",
+ "orgAdmin": "管理员邮箱",
+ "platformSettingsDesc": "管理平台级设置和公司租户。",
+ "tab": {
+ "dashboard": "总览",
+ "platform": "平台",
+ "companies": "公司"
+ },
+ "publicUrl": {
+ "title": "公开访问地址",
+ "desc": "用于 Webhook 回调和发布页面链接的外部 URL。请包含协议(如 https://example.com)。"
+ },
+ "noFilterResults": "当前筛选条件下没有公司。",
+ "filterStatus": "按状态筛选",
+ "all": "全部",
+ "domainConfig": "域名配置",
+ "domainConfigDesc": "为企业配置专属访问域名。",
+ "subdomainPrefix": "子域名前缀",
+ "subdomainPrefixPlaceholderHint": "留空则使用企业标识",
+ "prefixAvailable": "可用",
+ "prefixTaken": "已被占用",
+ "prefixChecking": "检查中...",
+ "default": "默认",
+ "setAsDefault": "默认企业",
+ "setAsDefaultDesc": "通过全局域名访问的用户将自动关联到此企业。",
+ "currentDefault": "当前默认",
+ "prefixTakenError": "子域名前缀已被占用,请选择其他前缀。",
+ "edit": "编辑",
+ "delete": "删除",
+ "confirmDelete": "确认删除",
+ "deleteCompany": "删除公司",
+ "deleteCompanyWarning": "此操作不可逆。以下数据将被永久删除:",
+ "deleteDataUsers": "该公司所有用户",
+ "deleteDataAgents": "所有数字员工及其对话记录",
+ "deleteDataSkills": "所有技能和 LLM 模型配置",
+ "deleteDataOther": "所有邀请码、组织架构、Token 记录",
+ "deleteCompanyName": "公司"
},
"companySetup": {
"title": "设置你的公司",
@@ -1084,14 +1180,14 @@
"setupGuide": "配置说明",
"feishuPermJson": "权限批量导入 JSON",
"feishuPermCopy": "复制",
- "feishuPermCopied": "✓ 已复制",
+ "feishuPermCopied": "已复制",
"slack": {
"step1": "打开 api.slack.com/apps → 点击 Create New App → 选择 From Scratch → 填写应用名称并选择你的 Workspace → 点击 Create App",
"step2": "左侧菜单点击 OAuth & Permissions → 向下滚动到 Scopes → 在 Bot Token Scopes 下点击 Add an OAuth Scope → 依次添加:chat:write、im:history、app_mentions:read",
"step3": "回到 OAuth & Permissions 顶部 → 点击 Install to Workspace → 点击 Allow → 复制 Bot User OAuth Token(以 xoxb- 开头)",
"step4": "左侧菜单 → Basic Information → 滚动到 App Credentials → 点击 Signing Secret 旁的 Show → 复制",
"step5": "将 Bot Token 和 Signing Secret 填入上方 → 点击保存配置 → 复制出现的 Webhook URL",
- "step6": "左侧菜单 → Event Subscriptions → 打开 Enable Events 开关 → 将 Webhook URL 粘贴到 Request URL 框中(显示 Verified ✓ 后继续)→ 在 Subscribe to bot events 下点击 Add Bot User Event,依次添加:app_mention、message.im → 点击 Save Changes",
+ "step6": "左侧菜单 → Event Subscriptions → 打开 Enable Events 开关 → 将 Webhook URL 粘贴到 Request URL 框中(显示 Verified 后继续)→ 在 Subscribe to bot events 下点击 Add Bot User Event,依次添加:app_mention、message.im → 点击 Save Changes",
"step7": "页面顶部会出现黄色横幅:点击 reinstall your app 并重新授权",
"step8": "在 Slack 中找到你的 Bot,发一条私信测试是否正常回复",
"note": "服务器必须能被公网访问,Slack 才能将消息推送到 Webhook URL。"
@@ -1182,6 +1278,13 @@
"wecom": "企业微信:传统模式需 corp_id, secret, agent_id;智能机器人模式需 bot_id, bot_secret",
"oauth2": "OAuth2:填写 App ID、App Secret、授权地址、Token地址、用户信息地址",
"saml": "SAML:需填写 entry_point, issuer, cert (公钥)",
- "microsoft_teams": "Microsoft Teams:需填写 client_id, client_secret, tenant_id" }
+ "microsoft_teams": "Microsoft Teams:需填写 client_id, client_secret, tenant_id"
+ }
+ },
+ "users": {
+ "newPassword": "新密码",
+ "newPasswordPlaceholder": "留空则不修改密码",
+ "newPasswordHint": "可选。填写后将重置用户密码。",
+ "passwordResetSuccess": "密码已重置"
}
}
diff --git a/frontend/src/pages/AdminCompanies.tsx b/frontend/src/pages/AdminCompanies.tsx
index 884b0f62..7b0c18f5 100644
--- a/frontend/src/pages/AdminCompanies.tsx
+++ b/frontend/src/pages/AdminCompanies.tsx
@@ -4,6 +4,7 @@ import { adminApi } from '../services/api';
import { useAuthStore } from '../stores';
import { saveAccentColor, getSavedAccentColor } from '../utils/theme';
import { IconFilter } from '@tabler/icons-react';
+import { ShieldCheck } from 'lucide-react';
import PlatformDashboard from './PlatformDashboard';
import LinearCopyButton from '../components/LinearCopyButton';
// Helper for authenticated JSON fetch
@@ -109,6 +110,11 @@ function PlatformTab() {
const [nbSaving, setNbSaving] = useState(false);
const [nbSaved, setNbSaved] = useState(false);
+ // Public URL
+ const [publicBaseUrl, setPublicBaseUrl] = useState('');
+ const [urlSaving, setUrlSaving] = useState(false);
+ const [urlSaved, setUrlSaved] = useState(false);
+
// System email configuration
const [systemEmailConfig, setSystemEmailConfig] = useState({
@@ -162,6 +168,11 @@ function PlatformTab() {
setNbText(d.value.text || '');
}
}).catch(() => { });
+ // Load Public URL
+ fetchJson
('/enterprise/system-settings/platform')
+ .then(d => {
+ if (d.value?.public_base_url) setPublicBaseUrl(d.value.public_base_url);
+ }).catch(() => { });
// Load System Email
fetchJson('/enterprise/system-settings/system_email_platform')
@@ -218,6 +229,21 @@ function PlatformTab() {
setNbSaving(false);
};
+ const savePublicUrl = async () => {
+ setUrlSaving(true);
+ try {
+ await fetchJson('/enterprise/system-settings/platform', {
+ method: 'PUT',
+ body: JSON.stringify({ value: { public_base_url: publicBaseUrl } }),
+ });
+ setUrlSaved(true);
+ setTimeout(() => setUrlSaved(false), 2000);
+ } catch (e) {
+ showToast('Failed to save', 'error');
+ }
+ setUrlSaving(false);
+ };
+
const saveEmailConfig = async () => {
setEmailConfigSaving(true);
@@ -380,6 +406,31 @@ function PlatformTab() {
+ {/* Public URL */}
+
+
+ {t('admin.publicUrl.title', 'Public URL')}
+
+
+ {t('admin.publicUrl.desc', 'The external URL used for webhook callbacks (Slack, Feishu, Discord, etc.) and published page links. Include the protocol (e.g. https://example.com).')}
+
+
+ setPublicBaseUrl(e.target.value)}
+ placeholder="https://your-domain.com"
+ style={{ fontSize: '13px' }}
+ />
+
+
+
+ {urlSaving ? t('common.loading') : t('common.save', 'Save')}
+
+ {urlSaved && {t('enterprise.config.saved', 'Saved')} }
+
+
+
{/* System Email Configuration */}
@@ -672,6 +723,19 @@ function CompaniesTab() {
const [createdCode, setCreatedCode] = useState('');
const [createdCompanyName, setCreatedCompanyName] = useState('');
+ // Edit company modal
+ const [editingCompany, setEditingCompany] = useState
(null);
+ const [deletingCompanyId, setDeletingCompanyId] = useState(null);
+ const [deleteConfirmCompany, setDeleteConfirmCompany] = useState(null);
+ const [publicBaseUrl, setPublicBaseUrl] = useState('');
+
+ // Invitation codes modal
+ const [codesModal, setCodesModal] = useState<{ companyId: string; companyName: string } | null>(null);
+ const [companyCodes, setCompanyCodes] = useState([]);
+ const [loadingCodes, setLoadingCodes] = useState(false);
+ const [newlyGeneratedCode, setNewlyGeneratedCode] = useState('');
+ const [codeCopied2, setCodeCopied2] = useState(false);
+
// Toast
const [toast, setToast] = useState<{ msg: string; type: 'success' | 'error' } | null>(null);
const showToast = (msg: string, type: 'success' | 'error' = 'success') => {
@@ -692,6 +756,9 @@ function CompaniesTab() {
useEffect(() => {
loadCompanies();
+ fetchJson('/enterprise/system-settings/platform')
+ .then(d => { if (d.value?.public_base_url) setPublicBaseUrl(d.value.public_base_url); })
+ .catch(() => {});
}, []);
// Sorting logic
@@ -748,6 +815,31 @@ function CompaniesTab() {
+ const handleViewCodes = async (companyId: string, companyName: string) => {
+ setCodesModal({ companyId, companyName });
+ setNewlyGeneratedCode('');
+ setLoadingCodes(true);
+ try {
+ const res = await adminApi.listCompanyCodes(companyId);
+ setCompanyCodes(res.codes || []);
+ } catch { setCompanyCodes([]); }
+ setLoadingCodes(false);
+ };
+
+ const handleGenerateCode = async () => {
+ if (!codesModal) return;
+ try {
+ const res = await adminApi.createCompanyCode(codesModal.companyId);
+ setNewlyGeneratedCode(res.code || '');
+ setCodeCopied2(false);
+ // Refresh list
+ const listRes = await adminApi.listCompanyCodes(codesModal.companyId);
+ setCompanyCodes(listRes.codes || []);
+ } catch (e: any) {
+ showToast(e.message || 'Failed', 'error');
+ }
+ };
+
const handleToggle = async (id: string, currentlyActive: boolean) => {
const action = currentlyActive ? 'disable' : 'enable';
if (currentlyActive && !confirm(t('admin.confirmDisable', 'Disable this company? All users and agents will be paused.'))) return;
@@ -760,6 +852,19 @@ function CompaniesTab() {
}
};
+ const handleDelete = async (company: any) => {
+ setDeletingCompanyId(company.id);
+ try {
+ await adminApi.deleteCompany(company.id);
+ loadCompanies();
+ showToast(`Company "${company.name}" deleted`);
+ } catch (e: any) {
+ showToast(e.message || 'Failed to delete', 'error');
+ }
+ setDeletingCompanyId(null);
+ setDeleteConfirmCompany(null);
+ };
+
// Sort indicator arrow
const SortArrow = ({ col }: { col: SortKey }) => {
if (sortKey !== col) return ↕ ;
@@ -781,7 +886,7 @@ function CompaniesTab() {
{ key: 'created_at', label: t('admin.createdAt', 'Created'), flex: '100px' },
{ key: 'is_active', label: t('admin.status', 'Status'), flex: '100px' },
];
- const actionColFlex = '80px';
+ const actionColFlex = '180px';
const gridCols = columns.map(c => c.flex).join(' ') + ' ' + actionColFlex;
@@ -796,6 +901,73 @@ function CompaniesTab() {
)}
+ {/* Edit Company Modal */}
+ {editingCompany && (
+ setEditingCompany(null)}
+ onUpdated={() => { loadCompanies(); setEditingCompany(null); }}
+ />
+ )}
+
+ {/* Delete Confirmation Modal */}
+ {deleteConfirmCompany && (
+ setDeleteConfirmCompany(null)}>
+
e.stopPropagation()}>
+
+ {t('admin.deleteCompany', 'Delete Company')}
+
+
+ {t('admin.deleteCompanyWarning', 'This action is irreversible. The following data will be permanently deleted:')}
+
+
+ {t('admin.deleteDataUsers', 'All users in this company')}
+ {t('admin.deleteDataAgents', 'All agents and their conversations')}
+ {t('admin.deleteDataSkills', 'All skills and LLM model configurations')}
+ {t('admin.deleteDataOther', 'All invitation codes, org structure, token records')}
+
+
+ {t('admin.deleteCompanyName', 'Company')}:
{deleteConfirmCompany.name}
+
+ {deleteConfirmCompany.user_count} {t('admin.users', 'users')} · {deleteConfirmCompany.agent_count} {t('admin.agents', 'agents')}
+
+
+
+ setDeleteConfirmCompany(null)}
+ disabled={!!deletingCompanyId}>
+ {t('common.cancel', 'Cancel')}
+
+ handleDelete(deleteConfirmCompany)}
+ disabled={!!deletingCompanyId}>
+ {deletingCompanyId === deleteConfirmCompany.id
+ ? t('common.loading', 'Loading...')
+ : t('admin.confirmDelete', 'Yes, Delete')}
+
+
+
+
+ )}
+
{/* Invitation Code Modal */}
{createdCode && (
)}
+ {/* Invitation Codes Modal */}
+ {codesModal && (
+
{ setCodesModal(null); setNewlyGeneratedCode(''); }}>
+
e.stopPropagation()}>
+
+
+ 邀请码 — {codesModal.companyName}
+
+ { setCodesModal(null); setNewlyGeneratedCode(''); }}
+ style={{ background: 'none', border: 'none', cursor: 'pointer', color: 'var(--text-tertiary)' }}>
+
+
+
+
+
+
+ {newlyGeneratedCode && (
+
+
+ 新生成的邀请码
+
+
+ {newlyGeneratedCode}
+ {
+ navigator.clipboard.writeText(newlyGeneratedCode).then(() => {
+ setCodeCopied2(true);
+ setTimeout(() => setCodeCopied2(false), 2000);
+ });
+ }}
+ >
+ {codeCopied2 ? '已复制' : '复制'}
+
+
+
+ )}
+
+
+
+ 有效邀请码
+
+ {loadingCodes ? (
+
加载中...
+ ) : companyCodes.length === 0 ? (
+
暂无有效邀请码
+ ) : (
+
+ {companyCodes.map((c: any) => (
+
+ {c.code}
+
+ {c.used_count}/{c.max_uses === 0 ? '∞' : c.max_uses} 次使用
+
+
+ ))}
+
+ )}
+
+
+
+
+ 生成新邀请码
+
+ { setCodesModal(null); setNewlyGeneratedCode(''); }}
+ style={{ padding: '0 20px' }}>
+ 关闭
+
+
+
+
+ )}
+
{/* Create Company button */}
{ setShowCreate(true); setCreatedCode(''); }}>
@@ -984,13 +1249,25 @@ function CompaniesTab() {
opacity: c.is_active ? 1 : 0.5,
}}>
-
{c.name}
+
+ {c.name}
+ {c.is_default && (
+
+ {t('admin.default', 'Default')}
+
+ )}
+
{c.slug}
{c.sso_enabled ? (
<>
- 🛡️
+
{c.sso_domain && (
+ setEditingCompany(c)}
+ >
+ {t('admin.edit', 'Edit')}
+
+ handleViewCodes(c.id, c.name)}
+ >
+ 邀请码
+
handleToggle(c.id, !!c.is_active)}
- disabled={c.slug === 'default'}
- title={c.slug === 'default' ? t('admin.cannotDisableDefault', 'Cannot disable the default company — platform admin would be locked out') : undefined}
+ disabled={c.is_default}
+ title={c.is_default ? t('admin.cannotDisableDefault', 'Cannot disable the default company — platform admin would be locked out') : undefined}
>
{c.is_active ? t('admin.disable', 'Disable') : t('admin.enable', 'Enable')}
+ !c.is_default && setDeleteConfirmCompany(c)}
+ disabled={c.is_default}
+ title={c.is_default
+ ? t('admin.cannotDeleteDefault', 'Cannot delete the default company')
+ : t('admin.deleteCompany', 'Delete Company')}
+ >
+ {t('admin.delete', 'Delete')}
+
))}
@@ -1083,21 +1390,43 @@ function CompaniesTab() {
}
// ─── Edit Company Modal ───────────────────────────────
-function EditCompanyModal({ company, onClose, onUpdated }: { company: any, onClose: () => void, onUpdated: () => void }) {
+function EditCompanyModal({ company, publicBaseUrl, onClose, onUpdated }: { company: any, publicBaseUrl: string, onClose: () => void, onUpdated: () => void }) {
const { t } = useTranslation();
- const [ssoEnabled, setSsoEnabled] = useState(!!company.sso_enabled);
- const [ssoDomain, setSsoDomain] = useState(company.sso_domain || '');
+ const [subdomainPrefix, setSubdomainPrefix] = useState(company.subdomain_prefix || '');
+ const [prefixStatus, setPrefixStatus] = useState<'idle' | 'checking' | 'available' | 'taken'>('idle');
+ const [isDefault, setIsDefault] = useState(!!company.is_default);
const [saving, setSaving] = useState(false);
const [error, setError] = useState('');
+ const globalHostname = publicBaseUrl ? (() => { try { return new URL(publicBaseUrl).hostname; } catch { return ''; } })() : '';
+ const globalProtocol = publicBaseUrl ? (() => { try { return new URL(publicBaseUrl).protocol + '//'; } catch { return 'https://'; } })() : 'https://';
+
+ const checkPrefix = async (prefix: string) => {
+ if (!prefix || prefix === company.subdomain_prefix) { setPrefixStatus('idle'); return; }
+ setPrefixStatus('checking');
+ try {
+ const res = await fetchJson
(
+ `/tenants/check-prefix?prefix=${encodeURIComponent(prefix)}&exclude_tenant_id=${company.id}`
+ );
+ setPrefixStatus(res.available ? 'available' : 'taken');
+ } catch { setPrefixStatus('idle'); }
+ };
+
const handleSave = async () => {
+ if (prefixStatus === 'taken') {
+ setError(t('admin.prefixTakenError', 'The subdomain prefix is already taken. Please choose a different one.'));
+ return;
+ }
setSaving(true);
setError('');
try {
- await adminApi.updateCompany(company.id, {
- sso_enabled: ssoEnabled,
- sso_domain: ssoDomain.trim() || null,
- });
+ const updateData: any = {
+ subdomain_prefix: subdomainPrefix.trim() || null,
+ };
+ if (isDefault !== !!company.is_default) {
+ updateData.is_default = isDefault;
+ }
+ await adminApi.updateCompany(company.id, updateData);
onUpdated();
onClose();
} catch (e: any) {
@@ -1106,6 +1435,28 @@ function EditCompanyModal({ company, onClose, onUpdated }: { company: any, onClo
setSaving(false);
};
+ const StatusBadge = ({ status }: { status: 'idle' | 'checking' | 'available' | 'taken' }) => {
+ if (status === 'checking') return {t('admin.prefixChecking', 'checking...')} ;
+ if (status === 'available') return {t('admin.prefixAvailable', 'Available')} ;
+ if (status === 'taken') return {t('admin.prefixTaken', 'Already taken')} ;
+ return null;
+ };
+
+ const switchStyle: React.CSSProperties = {
+ position: 'relative', display: 'inline-block', width: '40px', height: '22px',
+ cursor: 'pointer', flexShrink: 0,
+ };
+ const switchTrack = (checked: boolean): React.CSSProperties => ({
+ position: 'absolute', inset: 0,
+ background: checked ? 'var(--accent-primary)' : 'var(--bg-tertiary)',
+ borderRadius: '11px', transition: 'background 0.2s',
+ });
+ const switchThumb = (checked: boolean): React.CSSProperties => ({
+ position: 'absolute', left: checked ? '20px' : '2px', top: '2px',
+ width: '18px', height: '18px', background: '#fff',
+ borderRadius: '50%', transition: 'left 0.2s',
+ });
+
return (
e.stopPropagation()}>
-
+
{t('admin.editCompany', 'Edit Company')}: {company.name}
@@ -1127,46 +1478,87 @@ function EditCompanyModal({ company, onClose, onUpdated }: { company: any, onClo
-
-
- {t('admin.ssoConfigTitle', 'SSO & Domain Configuration')}
-
-
- {t('admin.ssoConfigDesc', 'Configure SSO and custom domain for this company.')}
-
-
-
-
- setSsoEnabled(e.target.checked)}
- style={{ width: '16px', height: '16px', cursor: 'pointer' }}
- />
- {t('admin.ssoEnabled', 'Enable SSO')}
-
+ {/* Default company toggle */}
+
+
+
+
+ {t('admin.setAsDefault', 'Default Company')}
+
+
+ {t('admin.setAsDefaultDesc', 'Users visiting the global domain will be associated with this company.')}
+
+
+ {company.is_default ? (
+
+ {t('admin.currentDefault', 'Current Default')}
+
+ ) : (
+
+ setIsDefault(e.target.checked)}
+ style={{ opacity: 0, width: 0, height: 0 }} />
+
+
+
+
+ )}
+
-
-
{t('admin.ssoDomain', 'Custom Access Domain')}
-
setSsoDomain(e.target.value)}
- placeholder={t('admin.ssoDomainPlaceholder', 'e.g. acme.clawith.com')}
- style={{ fontSize: '13px' }}
- />
+ {/* Domain configuration section */}
+
+
+ {t('admin.domainConfig', 'Domain Configuration')}
+
+
+ {t('admin.domainConfigDesc', 'Configure a dedicated access domain for this company.')}
+
+
+
+
+ {t('admin.subdomainPrefix', 'Subdomain Prefix')}
+
+
+ {
+ const v = e.target.value.toLowerCase().replace(/[^a-z0-9-]/g, '');
+ setSubdomainPrefix(v);
+ setPrefixStatus('idle');
+ }}
+ onBlur={() => checkPrefix(subdomainPrefix)}
+ placeholder={company.slug}
+ style={{ fontSize: '13px', maxWidth: '120px' }}
+ />
+ {globalHostname && (
+
+ .{globalHostname}
+
+ )}
+
+
+ {(subdomainPrefix || company.slug) && globalHostname && (
+
+ {globalProtocol}{subdomainPrefix || company.slug}.{globalHostname}
+
+ )}
- {error &&
{error}
}
+ {error &&
{error}
}
{t('common.cancel', 'Cancel')}
-
+
{saving ? t('common.loading') : t('common.save', 'Save')}
diff --git a/frontend/src/pages/AgentCreate.tsx b/frontend/src/pages/AgentCreate.tsx
index 8816eb9a..869d37dc 100644
--- a/frontend/src/pages/AgentCreate.tsx
+++ b/frontend/src/pages/AgentCreate.tsx
@@ -2,6 +2,7 @@ import { useState, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import { useTranslation } from 'react-i18next';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
+import { Eye, Settings } from 'lucide-react';
import { agentApi, channelApi, enterpriseApi, skillApi } from '../services/api';
import ChannelConfig from '../components/ChannelConfig';
import LinearCopyButton from '../components/LinearCopyButton';
@@ -786,10 +787,10 @@ For humans, the message is delivered via their available channel (e.g. Feishu).`
{t('wizard.step4.accessLevel', 'Default Access Level')}
- {[
- { value: 'use', icon: '👁️', label: t('wizard.step4.useLevel', 'Use'), desc: t('wizard.step4.useDesc', 'Can use Task, Chat, Tools, Skills, Workspace') },
- { value: 'manage', icon: '⚙️', label: t('wizard.step4.manageLevel', 'Manage'), desc: t('wizard.step4.manageDesc', 'Full access including Settings, Mind, Relationships') },
- ].map((lvl) => (
+ {([
+ { value: 'use', icon: , label: t('wizard.step4.useLevel', 'Use'), desc: t('wizard.step4.useDesc', 'Can use Task, Chat, Tools, Skills, Workspace') },
+ { value: 'manage', icon: , label: t('wizard.step4.manageLevel', 'Manage'), desc: t('wizard.step4.manageDesc', 'Full access including Settings, Mind, Relationships') },
+ ] as const).map((lvl) => (
{
@@ -244,7 +245,7 @@ function ToolsManager({ agentId, canManage = false }: { agentId: string; canMana
onClick={() => openCategoryConfig(category)}
style={{ background: 'none', border: '1px solid var(--border-subtle)', borderRadius: '6px', padding: '3px 8px', fontSize: '11px', cursor: 'pointer', color: 'var(--text-secondary)' }}
title={`Configure ${category}`}
- >⚙️ Config
+ > Config
)}
{canManage && (
@@ -275,6 +276,7 @@ function ToolsManager({ agentId, canManage = false }: { agentId: string; canMana
)}
+
{(catTools as any[]).map((tool: any) => {
@@ -310,7 +312,7 @@ function ToolsManager({ agentId, canManage = false }: { agentId: string; canMana
onClick={() => openConfig(tool)}
style={{ background: 'none', border: '1px solid var(--border-subtle)', borderRadius: '6px', padding: '3px 8px', fontSize: '11px', cursor: 'pointer', color: 'var(--text-secondary)' }}
title="Configure per-agent settings"
- >⚙️ Config
+ > Config
)}
{canManage && tool.source === 'agent' && tool.agent_tool_id && (
{deletingToolId === tool.id ? '...' : '✕'}
+ >{deletingToolId === tool.id ? '...' : }
)}
{canManage ? (
@@ -382,7 +384,7 @@ function ToolsManager({ agentId, canManage = false }: { agentId: string; canMana
boxShadow: toolTab === 'company' ? '0 1px 3px rgba(0,0,0,0.1)' : 'none',
}}
>
- {t('agent.tools.companyTools', 'Company Tools')} ({companyTools.length})
+ {t('agent.tools.companyTools', 'Company Tools')} ({companyTools.length})
setToolTab('installed')}
@@ -394,7 +396,7 @@ function ToolsManager({ agentId, canManage = false }: { agentId: string; canMana
boxShadow: toolTab === 'installed' ? '0 1px 3px rgba(0,0,0,0.1)' : 'none',
}}
>
- {t('agent.tools.agentInstalled', 'Agent Self-Installed Tools')} ({agentInstalledTools.length})
+ {t('agent.tools.agentInstalled', 'Agent Self-Installed Tools')} ({agentInstalledTools.length})
@@ -425,10 +427,10 @@ function ToolsManager({ agentId, canManage = false }: { agentId: string; canMana
e.stopPropagation()} style={{ background: 'var(--bg-primary)', borderRadius: '12px', padding: '24px', width: '480px', maxWidth: '95vw', maxHeight: '80vh', overflow: 'auto', boxShadow: '0 20px 60px rgba(0,0,0,0.4)' }}>
-
⚙️ {title}
+
{title}
{isCat ? 'Shared category configuration (affects all tools in this category)' : 'Per-agent configuration (overrides global defaults)'}
-
{ setConfigTool(null); setConfigCategory(null); }} style={{ background: 'none', border: 'none', fontSize: '18px', cursor: 'pointer', color: 'var(--text-secondary)' }}>✕
+
{ setConfigTool(null); setConfigCategory(null); }} style={{ background: 'none', border: 'none', fontSize: '18px', cursor: 'pointer', color: 'var(--text-secondary)' }}>
{fields.length > 0 ? (
@@ -1009,7 +1011,7 @@ function AgentDetailInner() {
const location = useLocation();
const validTabs = ['status', 'aware', 'mind', 'tools', 'skills', 'relationships', 'workspace', 'chat', 'activityLog', 'approvals', 'settings'];
const hashTab = location.hash?.replace('#', '');
- const [activeTab, setActiveTabRaw] = useState
(hashTab && validTabs.includes(hashTab) ? hashTab : 'status');
+ const [activeTab, setActiveTabRaw] = useState(hashTab && validTabs.includes(hashTab) ? hashTab : 'chat');
// Sync URL hash when tab changes
const setActiveTab = (tab: string) => {
@@ -1696,7 +1698,7 @@ function AgentDetailInner() {
// Memoized component for each chat message to avoid re-renders while typing
const ChatMessageItem = React.useMemo(() => React.memo(({ msg, i, isLeft, t }: { msg: any, i: number, isLeft: boolean, t: any }) => {
const fe = msg.fileName?.split('.').pop()?.toLowerCase() ?? '';
- const fi = fe === 'pdf' ? '📄' : (fe === 'csv' || fe === 'xlsx' || fe === 'xls') ? '📊' : (fe === 'docx' || fe === 'doc') ? '📝' : '📎';
+ const fi = fe === 'pdf' ? : (fe === 'csv' || fe === 'xlsx' || fe === 'xls') ? : (fe === 'docx' || fe === 'doc') ? : ;
const isImage = msg.imageUrl && ['png', 'jpg', 'jpeg', 'gif', 'webp', 'bmp'].includes(fe);
const timestampHtml = msg.timestamp ? (() => {
@@ -1720,7 +1722,7 @@ function AgentDetailInner() {
{isLeft ? (msg.sender_name ? msg.sender_name[0] : 'A') : 'U'}
- {isLeft && msg.sender_name &&
🤖 {msg.sender_name}
}
+ {isLeft && msg.sender_name &&
{msg.sender_name}
}
{isImage ? (
@@ -1733,7 +1735,7 @@ function AgentDetailInner() {
))}
{msg.thinking && (
- 💭 Thinking
+ Thinking
{msg.thinking}
)}
@@ -1801,7 +1803,7 @@ function AgentDetailInner() {
let filesDisplay = '';
attachedFiles.forEach(file => {
- filesDisplay += `[📎 ${file.name}] `;
+ filesDisplay += `[${file.name}] `;
if (file.imageUrl && supportsVision) {
filesPrompt += `[image_data:${file.imageUrl}]\n`;
} else if (file.imageUrl) {
@@ -2039,7 +2041,7 @@ function AgentDetailInner() {
},
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: ['schedules', id] });
- showToast('✅ Schedule triggered — executing in background', 'success');
+ showToast('Schedule triggered — executing in background', 'success');
},
onError: (err: any) => {
const msg = err?.response?.data?.detail || err?.message || 'Failed to trigger schedule';
@@ -2281,7 +2283,7 @@ function AgentDetailInner() {
style={{ background: 'none', border: 'none', cursor: 'pointer', fontSize: '11px', color: 'var(--text-tertiary)', padding: '1px 4px', borderRadius: '4px', lineHeight: 1 }}
onMouseEnter={e => (e.currentTarget.style.background = 'var(--bg-secondary)')}
onMouseLeave={e => (e.currentTarget.style.background = 'none')}
- >✏️ {t((agent as any).expires_at || (agent as any).is_expired ? 'agent.settings.expiry.renew' : 'agent.settings.expiry.setExpiry')}
+ >
{t((agent as any).expires_at || (agent as any).is_expired ? 'agent.settings.expiry.renew' : 'agent.settings.expiry.setExpiry')}
)}
@@ -2335,19 +2337,19 @@ function AgentDetailInner() {
{/* Metric cards */}
-
📋 {t('agent.tabs.status')}
+
{t('agent.tabs.status')}
{t(`agent.status.${statusKey}`)}
-
🗓️ {t('agent.settings.today')} Token
+
{t('agent.settings.today')} Token
{formatTokens(agent.tokens_used_today)}
{agent.max_tokens_per_day &&
{t('agent.settings.noLimit')} {formatTokens(agent.max_tokens_per_day)}
}
-
📅 {t('agent.settings.month')} Token
+
{t('agent.settings.month')} Token
{formatTokens(agent.tokens_used_month)}
{agent.max_tokens_per_month &&
{t('agent.settings.noLimit')} {formatTokens(agent.max_tokens_per_month)}
}
@@ -2365,7 +2367,7 @@ function AgentDetailInner() {
{metrics && (
<>
-
✅ {t('agent.tasks.done')}
+
{t('agent.tasks.done')}
{metrics.tasks?.done || 0}/{metrics.tasks?.total || 0}
{metrics.tasks?.completion_rate || 0}%
@@ -2482,7 +2484,7 @@ function AgentDetailInner() {
{activityLogs && activityLogs.length > 0 && (
-
📊 Recent Activity
+ Recent Activity
setActiveTab('activityLog')}>View All →
@@ -3191,7 +3193,7 @@ function AgentDetailInner() {
{/* Soul Section */}
- 🧬 {t('agent.soul.title')}
+ {t('agent.soul.title')}
{t('agent.mind.soulDesc', 'Core identity, personality, and behavior boundaries.')}
@@ -3202,7 +3204,7 @@ function AgentDetailInner() {
{/* Memory Section */}
- 🧠 {t('agent.memory.title')}
+ {t('agent.memory.title')}
{t('agent.mind.memoryDesc', 'Persistent memory accumulated through conversations and experiences.')}
@@ -3213,13 +3215,26 @@ function AgentDetailInner() {
{/* Heartbeat Section */}
- 💓 {t('agent.mind.heartbeatTitle', 'Heartbeat')}
+ {t('agent.mind.heartbeatTitle', 'Heartbeat')}
{t('agent.mind.heartbeatDesc', 'Instructions for periodic awareness checks. The agent reads this file during each heartbeat.')}
+
+ {/* Secrets Section — only visible to agent creator */}
+ {((agent as any)?.creator_id === currentUser?.id || currentUser?.role === 'platform_admin') && (
+
+
+ {t('agent.mind.secretsTitle', 'Secrets')}
+
+
+ {t('agent.mind.secretsDesc', 'Sensitive credentials (passwords, API keys, connection strings). Only visible to the agent creator.')}
+
+
+
+ )}
);
})()
@@ -3414,8 +3429,8 @@ function AgentDetailInner() {
setShowImportSkillModal(false)}>
e.stopPropagation()} style={{ background: 'var(--bg-primary)', borderRadius: '12px', padding: '24px', maxWidth: '600px', width: '90%', maxHeight: '70vh', display: 'flex', flexDirection: 'column', boxShadow: '0 20px 60px rgba(0,0,0,0.3)' }}>
-
📦 {t('agent.skills.importPreset', 'Import from Presets')}
-
setShowImportSkillModal(false)} style={{ background: 'none', border: 'none', fontSize: '18px', cursor: 'pointer', color: 'var(--text-secondary)', padding: '4px 8px' }}>✕
+
{t('agent.skills.importPreset', 'Import from Presets')}
+
setShowImportSkillModal(false)} style={{ background: 'none', border: 'none', fontSize: '18px', cursor: 'pointer', color: 'var(--text-secondary)', padding: '4px 8px' }}>
{t('agent.skills.importDesc', 'Select a preset skill to import into this agent. All skill files will be copied to the agent\'s skills folder.')}
@@ -3439,7 +3454,7 @@ function AgentDetailInner() {
onMouseLeave={e => (e.currentTarget.style.borderColor = 'var(--border-subtle)')}
>
-
{skill.icon || '📋'}
+
{skill.icon || }
{skill.name}
@@ -3447,7 +3462,7 @@ function AgentDetailInner() {
📁 {skill.folder_name}
- {skill.is_default && ✓ Default }
+ {skill.is_default && Default }
@@ -3469,7 +3484,7 @@ function AgentDetailInner() {
}
}}
>
- {importingSkillId === skill.id ? '⏳ ...' : '⬇️ Import'}
+ {importingSkillId === skill.id ? '...' : <>
Import>}
))
@@ -3677,7 +3692,7 @@ function AgentDetailInner() {
<>
- {activeSession.source_channel === 'agent' ? `🤖 Agent Conversation · ${activeSession.username || 'Agents'}` : `Read-only · ${activeSession.username || 'User'}`}
+ {activeSession.source_channel === 'agent' ? `Agent Conversation · ${activeSession.username || 'Agents'}` : `Read-only · ${activeSession.username || 'User'}`}
{(() => {
// For A2A sessions, determine which participant is "this agent" (left side)
@@ -3702,7 +3717,7 @@ function AgentDetailInner() {
- ⚡
+
{tName}
{tArgs && typeof tArgs === 'object' && Object.keys(tArgs).length > 0 && {`(${Object.entries(tArgs).map(([k, v]) => `${k}: ${typeof v === 'string' ? v.slice(0, 30) : JSON.stringify(v)}`).join(', ')})`} }
@@ -3767,7 +3782,7 @@ function AgentDetailInner() {
- {msg.toolStatus === 'running' ? '⏳' : '⚡'}
+ {msg.toolStatus === 'running' ? : }
{msg.toolName}
{msg.toolArgs && Object.keys(msg.toolArgs).length > 0 && {`(${Object.entries(msg.toolArgs).map(([k, v]) => `${k}: ${typeof v === 'string' ? v.slice(0, 30) : JSON.stringify(v)}`).join(', ')})`} }
{msg.toolStatus === 'running' && {t('common.loading')} }
@@ -3999,7 +4014,7 @@ function AgentDetailInner() {
filteredLogs = activityLogs.filter((l: any) => messageTypes.includes(l.action_type));
}
- const filterBtn = (key: string, label: string, indent = false) => (
+ const filterBtn = (key: string, label: React.ReactNode, indent = false) => (
setLogFilter(key)}
@@ -4026,15 +4041,15 @@ function AgentDetailInner() {
{/* Filter tabs */}
- {filterBtn('user', '👤 ' + t('agent.activityLog.userActions', 'User Actions'))}
+ {filterBtn('user', <>
{t('agent.activityLog.userActions', 'User Actions')}>)}
{(agent as any)?.agent_type !== 'openclaw' && (<>
- {filterBtn('backend', '⚙️ ' + t('agent.activityLog.backendServices', 'Backend Services'))}
+ {filterBtn('backend', <>
{t('agent.activityLog.backendServices', 'Backend Services')}>)}
{(logFilter === 'backend' || logFilter === 'heartbeat' || logFilter === 'schedule' || logFilter === 'messages') && (
<>
│
- {filterBtn('heartbeat', '💓 ' + t('agent.mind.heartbeatTitle'))}
- {filterBtn('schedule', '⏰ ' + t('agent.activityLog.scheduleCron'), true)}
- {filterBtn('messages', '📨 ' + t('agent.activityLog.messages'), true)}
+ {filterBtn('heartbeat', <>
{t('agent.mind.heartbeatTitle')}>)}
+ {filterBtn('schedule', <>
{t('agent.activityLog.scheduleCron')}>, true)}
+ {filterBtn('messages', <>
{t('agent.activityLog.messages')}>, true)}
>
)}
>)}
@@ -4043,11 +4058,11 @@ function AgentDetailInner() {
{filteredLogs.length > 0 ? (
{filteredLogs.map((log: any) => {
- const icons: Record
= {
- chat_reply: '💬', tool_call: '⚡', feishu_msg_sent: '📤',
- agent_msg_sent: '🤖', web_msg_sent: '🌐', task_created: '📋',
- task_updated: '✅', file_written: '📝', error: '❌',
- schedule_run: '⏰', heartbeat: '💓', plaza_post: '🏛️',
+ const icons: Record = {
+ chat_reply: , tool_call: , feishu_msg_sent: ,
+ agent_msg_sent: , web_msg_sent: , task_created: ,
+ task_updated: , file_written: , error: ,
+ schedule_run: , heartbeat: , plaza_post: ,
};
const time = log.created_at ? new Date(log.created_at).toLocaleString('zh-CN', {
month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit', second: '2-digit',
@@ -4071,7 +4086,7 @@ function AgentDetailInner() {
{log.summary}
{time} · {log.action_type}
- {log.detail && !isExpanded && ▸ Details }
+ {log.detail && !isExpanded && Details }
@@ -4374,7 +4389,7 @@ function AgentDetailInner() {
{/* Max Tool Call Rounds */}
-
🔧 {t('agent.settings.maxToolRounds', 'Max Tool Call Rounds')}
+
{t('agent.settings.maxToolRounds', 'Max Tool Call Rounds')}
{t('agent.settings.maxToolRoundsLabel', 'Maximum rounds per message')}
{isChinese ? '欢迎语' : 'Welcome Message'}
- {wmSaved && ✓ {isChinese ? '已保存' : 'Saved'} }
+ {wmSaved && {isChinese ? '已保存' : 'Saved'} }
{isChinese
@@ -4587,9 +4602,9 @@ function AgentDetailInner() {
{/* Permission Management */}
{(() => {
- const scopeLabels: Record = {
- company: '🏢 ' + t('agent.settings.perm.companyWide', 'Company-wide'),
- user: '👤 ' + t('agent.settings.perm.onlyMe', 'Only Me'),
+ const scopeLabels: Record = {
+ company: <> {t('agent.settings.perm.companyWide', 'Company-wide')}>,
+ user: <> {t('agent.settings.perm.onlyMe', 'Only Me')}>,
};
const handleScopeChange = async (newScope: string) => {
@@ -4626,7 +4641,7 @@ function AgentDetailInner() {
return (
-
🔒 {t('agent.settings.perm.title', 'Access Permissions')}
+
{t('agent.settings.perm.title', 'Access Permissions')}
{t('agent.settings.perm.description', 'Control who can see and interact with this agent. Only the creator or admin can change this.')}
@@ -4679,8 +4694,8 @@ function AgentDetailInner() {
{t('agent.settings.perm.defaultAccess', 'Default Access Level')}
- {[{ val: 'use', label: '👁️ ' + t('agent.settings.perm.useAccess', 'Use'), desc: t('agent.settings.perm.useAccessDesc', 'Task, Chat, Tools, Skills, Workspace') },
- { val: 'manage', label: '⚙️ ' + t('agent.settings.perm.manageAccess', 'Manage'), desc: t('agent.settings.perm.manageAccessDesc', 'Full access including Settings, Mind, Relationships') }].map(opt => (
+ {[{ val: 'use', label: <>
{t('agent.settings.perm.useAccess', 'Use')}>, desc: t('agent.settings.perm.useAccessDesc', 'Task, Chat, Tools, Skills, Workspace') },
+ { val: 'manage', label: <>
{t('agent.settings.perm.manageAccess', 'Manage')}>, desc: t('agent.settings.perm.manageAccessDesc', 'Full access including Settings, Mind, Relationships') }].map(opt => (
- {t('agent.settings.timezone.title', '🌐 Timezone')}
+ {<> {t('agent.settings.timezone.title', 'Timezone')}>}
{t('agent.settings.timezone.description', 'The timezone used for this agent\'s scheduling, active hours, and time awareness. Defaults to the company timezone if not set.')}
@@ -4993,12 +5008,12 @@ function AgentDetailInner() {
e.stopPropagation()}>
-
⏰ {t('agent.settings.expiry.title')}
+ {t('agent.settings.expiry.title')}
setShowExpiryModal(false)} style={{ background: 'none', border: 'none', cursor: 'pointer', color: 'var(--text-tertiary)', fontSize: '18px', lineHeight: 1 }}>×
{(agent as any).is_expired
- ?
⏰ {t('agent.settings.expiry.expired')}
+ ?
{t('agent.settings.expiry.expired')}
: (agent as any).expires_at
? <>{t('agent.settings.expiry.currentExpiry')}
{new Date((agent as any).expires_at).toLocaleString(i18n.language === 'zh' ? 'zh-CN' : 'en-US')} >
:
{t('agent.settings.expiry.neverExpires')}
@@ -5028,7 +5043,7 @@ function AgentDetailInner() {
saveExpiry(true)} disabled={expirySaving}
style={{ padding: '7px 12px', borderRadius: '8px', border: '1px solid var(--border-subtle)', background: 'none', cursor: 'pointer', fontSize: '12px', color: 'var(--text-secondary)' }}>
- 🔓 {t('agent.settings.expiry.neverExpires')}
+ {t('agent.settings.expiry.neverExpires')}
setShowExpiryModal(false)} disabled={expirySaving}
diff --git a/frontend/src/pages/CompanySetup.tsx b/frontend/src/pages/CompanySetup.tsx
index c17276aa..25fb74b7 100644
--- a/frontend/src/pages/CompanySetup.tsx
+++ b/frontend/src/pages/CompanySetup.tsx
@@ -1,6 +1,7 @@
import { useState, useEffect } from 'react';
import { useNavigate, useLocation, useSearchParams } from 'react-router-dom';
import { useTranslation } from 'react-i18next';
+import { Globe, AlertTriangle } from 'lucide-react';
import { useAuthStore } from '../stores';
import { tenantApi, authApi } from '../services/api';
@@ -120,7 +121,7 @@ export default function CompanySetup() {
background: 'var(--bg-secondary)', border: '1px solid var(--border-subtle)',
zIndex: 101,
}} onClick={toggleLang}>
- 🌐
+
@@ -134,7 +135,7 @@ export default function CompanySetup() {
{error && (
)}
diff --git a/frontend/src/pages/Dashboard.tsx b/frontend/src/pages/Dashboard.tsx
index 577ff9ff..5de72d33 100644
--- a/frontend/src/pages/Dashboard.tsx
+++ b/frontend/src/pages/Dashboard.tsx
@@ -406,7 +406,7 @@ export default function Dashboard() {
// Greeting
const hour = new Date().getHours();
- const greeting = hour < 6 ? '🌙 ' + t('dashboard.greeting.lateNight') : hour < 12 ? '☀️ ' + t('dashboard.greeting.morning') : hour < 18 ? '🌤️ ' + t('dashboard.greeting.afternoon') : '🌙 ' + t('dashboard.greeting.evening');
+ const greeting = hour < 6 ? t('dashboard.greeting.lateNight') : hour < 12 ? t('dashboard.greeting.morning') : hour < 18 ? t('dashboard.greeting.afternoon') : t('dashboard.greeting.evening');
return (
diff --git a/frontend/src/pages/EnterpriseSettings.tsx b/frontend/src/pages/EnterpriseSettings.tsx
index 8166a0bc..d636d9ef 100644
--- a/frontend/src/pages/EnterpriseSettings.tsx
+++ b/frontend/src/pages/EnterpriseSettings.tsx
@@ -130,8 +130,8 @@ function SsoChannelSection({ idpType, existingProvider, tenant, t }: {
}, [existingProvider?.sso_domain, tenant?.sso_domain]);
const ssoEnabled = existingProvider ? !!existingProvider.sso_login_enabled : false;
- const domain = liveDomain;
- const callbackUrl = domain ? (domain.startsWith('http') ? `${domain}/api/auth/${idpType}/callback` : `https://${domain}/api/auth/${idpType}/callback`) : '';
+ const domain = liveDomain || window.location.origin;
+ const callbackUrl = domain.startsWith('http') ? `${domain}/api/auth/${idpType}/callback` : `https://${domain}/api/auth/${idpType}/callback`;
const handleSsoToggle = async () => {
if (!existingProvider) {
@@ -281,29 +281,23 @@ function OrgTab({ tenant }: { tenant: any }) {
const SsoStatus = () => {
- const [isExpanded, setIsExpanded] = useState(!!tenant?.sso_enabled);
- const [ssoEnabled, setSsoEnabled] = useState(!!tenant?.sso_enabled);
const [ssoDomain, setSsoDomain] = useState(tenant?.sso_domain || '');
const [saving, setSaving] = useState(false);
const [error, setError] = useState('');
useEffect(() => {
- setSsoEnabled(!!tenant?.sso_enabled);
setSsoDomain(tenant?.sso_domain || '');
- setIsExpanded(!!tenant?.sso_enabled);
}, [tenant]);
- const handleSave = async (forceEnabled?: boolean) => {
+ const handleSave = async () => {
if (!tenant?.id) return;
- const targetEnabled = forceEnabled !== undefined ? forceEnabled : ssoEnabled;
setSaving(true);
setError('');
try {
await fetchJson(`/tenants/${tenant.id}`, {
method: 'PUT',
body: JSON.stringify({
- sso_enabled: targetEnabled,
- sso_domain: targetEnabled ? (ssoDomain.trim() || null) : null,
+ sso_domain: ssoDomain.trim() || null,
}),
});
qc.invalidateQueries({ queryKey: ['tenant', tenant.id] });
@@ -313,79 +307,32 @@ function OrgTab({ tenant }: { tenant: any }) {
setSaving(false);
};
- const handleToggle = (e: React.ChangeEvent
) => {
- const checked = e.target.checked;
- setSsoEnabled(checked);
- setIsExpanded(checked);
- if (!checked) {
- // auto-save when disabling
- handleSave(false);
- }
- };
-
return (
-
-
-
- {t('enterprise.identity.ssoTitle', 'Enterprise SSO')}
-
-
- {t('enterprise.identity.ssoDisabledHint', 'Seamless enterprise login via Single Sign-On.')}
-
+
+
+ {t('enterprise.identity.ssoTitle', 'Enterprise SSO')}
-
-
-
-
-
-
-
+
+ {t('enterprise.identity.ssoDisabledHint', 'Seamless enterprise login via Single Sign-On.')}
-
-
- {isExpanded && (
-
-
-
- {t('enterprise.identity.ssoDomain', 'Custom Access Domain')}
-
-
setSsoDomain(e.target.value)}
- placeholder={t('enterprise.identity.ssoDomainPlaceholder', 'e.g. acme.clawith.com')}
- style={{ fontSize: '13px', width: '100%', maxWidth: '400px' }}
- />
-
- {t('enterprise.identity.ssoDomainDesc', 'The custom domain users will use to log in via SSO.')}
-
-
- {error &&
{error}
}
-
-
-
handleSave()} disabled={saving || !ssoDomain.trim()}>
- {saving ? t('common.loading') : t('common.save', 'Save Configuration')}
-
+
+
+ {t('enterprise.identity.ssoDomain', 'Custom Access Domain')}
+
+
setSsoDomain(e.target.value)}
+ placeholder={t('enterprise.identity.ssoDomainPlaceholder', 'e.g. acme.clawith.com')}
+ style={{ fontSize: '13px', width: '100%', maxWidth: '400px' }}
+ />
+
+ {t('enterprise.identity.ssoDomainDesc', 'The custom domain users will use to log in via SSO.')}
- )}
+
);
};
@@ -410,7 +357,8 @@ function OrgTab({ tenant }: { tenant: any }) {
authorize_url: '',
token_url: '',
user_info_url: '',
- scope: 'openid profile email'
+ scope: 'openid profile email',
+ field_mapping: { user_id: '', name: '', email: '', mobile: '' } as Record
});
const currentTenantId = localStorage.getItem('current_tenant_id') || '';
@@ -448,14 +396,14 @@ function OrgTab({ tenant }: { tenant: any }) {
// Mutations
const addProvider = useMutation({
mutationFn: (data: any) => {
- const payload = { ...data, tenant_id: currentTenantId, is_active: true };
- if (data.provider_type === 'oauth2' && useOAuth2Form) {
+ if (data.provider_type === 'oauth2') {
+ // 直接发送,config 已经在正确位置
return fetchJson('/enterprise/identity-providers/oauth2', {
method: 'POST',
- body: JSON.stringify(payload)
+ body: JSON.stringify({ ...data, tenant_id: currentTenantId })
});
}
- return fetchJson('/enterprise/identity-providers', { method: 'POST', body: JSON.stringify(payload) });
+ return fetchJson('/enterprise/identity-providers', { method: 'POST', body: JSON.stringify(data) });
},
onSuccess: () => {
qc.invalidateQueries({ queryKey: ['identity-providers'] });
@@ -469,7 +417,8 @@ function OrgTab({ tenant }: { tenant: any }) {
const updateProvider = useMutation({
mutationFn: ({ id, data }: { id: string; data: any }) => {
- if (data.provider_type === 'oauth2' && useOAuth2Form) {
+ if (data.provider_type === 'oauth2') {
+ // 直接发送,config 已经在正确位置
return fetchJson(`/enterprise/identity-providers/${id}/oauth2`, {
method: 'PATCH',
body: JSON.stringify(data)
@@ -507,14 +456,24 @@ function OrgTab({ tenant }: { tenant: any }) {
setSyncing(null);
};
- const initOAuth2FromConfig = (config: any) => ({
- app_id: config?.app_id || config?.client_id || '',
- app_secret: config?.app_secret || config?.client_secret || '',
- authorize_url: config?.authorize_url || '',
- token_url: config?.token_url || '',
- user_info_url: config?.user_info_url || '',
- scope: config?.scope || 'openid profile email'
- });
+ const initOAuth2FromConfig = (config: any): { app_id: string; app_secret: string; authorize_url: string; token_url: string; user_info_url: string; scope: string; field_mapping: { user_id: string; name: string; email: string; mobile: string } | null } => {
+ const fm = config?.field_mapping;
+ const hasFieldMapping = fm && Object.keys(fm).length > 0;
+ return {
+ app_id: config?.app_id || config?.client_id || '',
+ app_secret: config?.app_secret || config?.client_secret || '',
+ authorize_url: config?.authorize_url || '',
+ token_url: config?.token_url || '',
+ user_info_url: config?.user_info_url || '',
+ scope: config?.scope || 'openid profile email',
+ field_mapping: hasFieldMapping ? {
+ user_id: fm.user_id || '',
+ name: fm.name || '',
+ email: fm.email || '',
+ mobile: fm.mobile || '',
+ } : null,
+ };
+ };
const save = () => {
setSavingProvider(true);
@@ -527,10 +486,10 @@ function OrgTab({ tenant }: { tenant: any }) {
};
const IDP_TYPES = [
- { type: 'feishu', name: 'Feishu', desc: 'Feishu / Lark Integration', icon: },
- { type: 'wecom', name: 'WeCom', desc: 'WeChat Work Integration', icon: },
- { type: 'dingtalk', name: 'DingTalk', desc: 'DingTalk App Integration', icon: },
- { type: 'oauth2', name: 'OAuth2', desc: 'Generic OIDC Provider', icon: O
}
+ { type: 'feishu', name: 'Feishu', desc: t('enterprise.identity.feishuDesc', 'Feishu / Lark Integration'), icon: },
+ { type: 'wecom', name: 'WeCom', desc: t('enterprise.identity.wecomDesc', 'WeChat Work Integration'), icon: },
+ { type: 'dingtalk', name: 'DingTalk', desc: t('enterprise.identity.dingtalkDesc', 'DingTalk App Integration'), icon: },
+ { type: 'oauth2', name: 'OAuth2', desc: t('enterprise.identity.oauth2Desc', 'Generic OIDC Provider'), icon: O
}
];
const handleExpand = (type: string, existingProvider?: any) => {
@@ -543,7 +502,15 @@ function OrgTab({ tenant }: { tenant: any }) {
setUseOAuth2Form(type === 'oauth2');
if (existingProvider) {
- setForm({ ...existingProvider, ...(type === 'oauth2' ? initOAuth2FromConfig(existingProvider.config) : {}) });
+ if (type === 'oauth2') {
+ const oauth2Config = initOAuth2FromConfig(existingProvider.config || {});
+ setForm({
+ ...existingProvider,
+ config: oauth2Config,
+ } as any);
+ } else {
+ setForm({ ...existingProvider });
+ }
} else {
const defaults: any = {
feishu: { app_id: '', app_secret: '', corp_id: '' },
@@ -551,13 +518,31 @@ function OrgTab({ tenant }: { tenant: any }) {
wecom: { corp_id: '', secret: '', agent_id: '', bot_id: '', bot_secret: '' },
};
const nameMap: Record = { feishu: 'Feishu', wecom: 'WeCom', dingtalk: 'DingTalk', oauth2: 'OAuth2' };
- setForm({
- provider_type: type,
- name: nameMap[type] || type,
- config: defaults[type] || {},
- app_id: '', app_secret: '', authorize_url: '', token_url: '', user_info_url: '',
- scope: 'openid profile email'
- });
+ if (type === 'oauth2') {
+ setForm({
+ provider_type: type,
+ name: nameMap[type] || type,
+ is_active: true,
+ sso_login_enabled: true,
+ config: {
+ app_id: '',
+ app_secret: '',
+ authorize_url: '',
+ token_url: '',
+ user_info_url: '',
+ scope: 'openid profile email',
+ field_mapping: null,
+ },
+ tenant_id: currentTenantId,
+ } as any);
+ } else {
+ setForm({
+ provider_type: type,
+ name: nameMap[type] || type,
+ config: defaults[type] || {},
+ tenant_id: currentTenantId,
+ } as any);
+ }
}
setSelectedDept(null);
setMemberSearch('');
@@ -633,16 +618,78 @@ function OrgTab({ tenant }: { tenant: any }) {
{type === 'oauth2' ? (
- Client ID
- setForm({ ...form, app_id: e.target.value })} />
+ {t('enterprise.identity.clientId')}
+ setForm({ ...form, config: { ...form.config, app_id: e.target.value } })} />
- Client Secret
- setForm({ ...form, app_secret: e.target.value })} />
+ {t('enterprise.identity.clientSecret')}
+ setForm({ ...form, config: { ...form.config, app_secret: e.target.value } })} />
+
+
+ {t('enterprise.identity.authorizeUrl')}
+ setForm({ ...form, config: { ...form.config, authorize_url: e.target.value } })} placeholder={t('enterprise.identity.authorizeUrlPlaceholder')} />
+
+
+ {t('enterprise.identity.tokenUrl')}
+ setForm({ ...form, config: { ...form.config, token_url: e.target.value } })} placeholder={t('enterprise.identity.tokenUrlPlaceholder')} />
+
+
+ {t('enterprise.identity.userInfoUrl')}
+ setForm({ ...form, config: { ...form.config, user_info_url: e.target.value } })} placeholder={t('enterprise.identity.tokenUrlPlaceholder')} />
- Authorize URL
- setForm({ ...form, authorize_url: e.target.value })} />
+ {t('enterprise.identity.scope')}
+ setForm({ ...form, config: { ...form.config, scope: e.target.value } })} placeholder={t('enterprise.identity.scopePlaceholder')} />
+
+
+
+ {t('enterprise.identity.fieldMapping')}
+
+
+ {t('enterprise.identity.fieldMappingHint')}
+
+
+
+
{t('enterprise.identity.userIdField')}
+
+ setForm({ ...form, config: { ...form.config, field_mapping: form.config.field_mapping ? { ...form.config.field_mapping, user_id: e.target.value } : { user_id: e.target.value, name: '', email: '', mobile: '' } } })}
+ placeholder={t('enterprise.identity.userIdFieldPlaceholder')} style={{ fontSize: '12px', flex: 1 }} />
+ setForm({ ...form, config: { ...form.config, field_mapping: form.config.field_mapping ? { ...form.config.field_mapping, user_id: '' } : { user_id: '', name: '', email: '', mobile: '' } } })} title={t('enterprise.identity.clearField')} style={{ minWidth: 'auto', padding: '4px 8px' }}>✕
+
+
+
+
{t('enterprise.identity.nameField')}
+
+ setForm({ ...form, config: { ...form.config, field_mapping: form.config.field_mapping ? { ...form.config.field_mapping, name: e.target.value } : { user_id: '', name: e.target.value, email: '', mobile: '' } } })}
+ placeholder={t('enterprise.identity.nameFieldPlaceholder')} style={{ fontSize: '12px', flex: 1 }} />
+ setForm({ ...form, config: { ...form.config, field_mapping: form.config.field_mapping ? { ...form.config.field_mapping, name: '' } : { user_id: '', name: '', email: '', mobile: '' } } })} title={t('enterprise.identity.clearField')} style={{ minWidth: 'auto', padding: '4px 8px' }}>✕
+
+
+
+
{t('enterprise.identity.emailField')}
+
+ setForm({ ...form, config: { ...form.config, field_mapping: form.config.field_mapping ? { ...form.config.field_mapping, email: e.target.value } : { user_id: '', name: '', email: e.target.value, mobile: '' } } })}
+ placeholder={t('enterprise.identity.emailFieldPlaceholder')} style={{ fontSize: '12px', flex: 1 }} />
+ setForm({ ...form, config: { ...form.config, field_mapping: form.config.field_mapping ? { ...form.config.field_mapping, email: '' } : { user_id: '', name: '', email: '', mobile: '' } } })} title={t('enterprise.identity.clearField')} style={{ minWidth: 'auto', padding: '4px 8px' }}>✕
+
+
+
+
{t('enterprise.identity.mobileField')}
+
+ setForm({ ...form, config: { ...form.config, field_mapping: form.config.field_mapping ? { ...form.config.field_mapping, mobile: e.target.value } : { user_id: '', name: '', email: '', mobile: e.target.value } } })}
+ placeholder={t('enterprise.identity.mobileFieldPlaceholder')} style={{ fontSize: '12px', flex: 1 }} />
+ setForm({ ...form, config: { ...form.config, field_mapping: form.config.field_mapping ? { ...form.config.field_mapping, mobile: '' } : { user_id: '', name: '', email: '', mobile: '' } } })} title={t('enterprise.identity.clearField')} style={{ minWidth: 'auto', padding: '4px 8px' }}>✕
+
+
+
+
+ setForm({ ...form, config: { ...form.config, field_mapping: null } })} disabled={!form.config?.field_mapping} style={{ fontSize: '11px' }}>{t('enterprise.identity.clearAllMappings')}
+ {form.config?.field_mapping && {t('enterprise.identity.hasCustomMapping')} }
+
) : type === 'wecom' ? (
@@ -709,10 +756,10 @@ function OrgTab({ tenant }: { tenant: any }) {
{savingProvider ? t('common.loading') : t('common.save', 'Save')}
{saveProviderOk && (
- Saved
+ {t('enterprise.identity.savedStatus', 'Saved')}
)}
{existingProvider && (
- confirm('Are you sure you want to delete this configuration?') && deleteProvider.mutate(existingProvider.id)}>
+ confirm(t('enterprise.identity.deleteConfirmProvider', 'Are you sure you want to delete this configuration?')) && deleteProvider.mutate(existingProvider.id)}>
{t('common.delete', 'Delete')}
)}
@@ -730,12 +777,12 @@ function OrgTab({ tenant }: { tenant: any }) {
{['feishu', 'dingtalk', 'wecom'].includes(p.provider_type) && (
triggerSync(p.id)} disabled={!!syncing}>
- {syncing === p.id ? 'Syncing...' : 'Sync Directory'}
+ {syncing === p.id ? t('enterprise.identity.syncSyncing', 'Syncing...') : t('enterprise.identity.syncDirectory', 'Sync Directory')}
)}
{syncResult && (
- {syncResult.error ? `Error: ${syncResult.error}` : `Sync complete: ${syncResult.users_created || 0} users created, ${syncResult.profiles_synced || 0} profiles synced.`}
+ {syncResult.error ? t('enterprise.identity.syncError', { error: syncResult.error, defaultValue: 'Error: {{error}}' }) : t('enterprise.identity.syncComplete', { users_created: syncResult.users_created || 0, profiles_synced: syncResult.profiles_synced || 0, defaultValue: 'Sync complete: {{users_created}} users created, {{profiles_synced}} profiles synced.' })}
)}
@@ -756,12 +803,16 @@ function OrgTab({ tenant }: { tenant: any }) {
{members.map((m: any) => (
-
{m.name?.[0]}
-
-
{m.name}
-
- {m.provider_type &&
{m.provider_type} }
- {m.title || '-'} · {m.department_path || m.department_id || '-'}
+
{(m.user_display_name || m.name)?.[0]}
+
+
+ {m.user_display_name || m.name}
+ {m.provider_type && {m.provider_type} }
+
+
+ {m.email && {m.email} }
+ {m.phone && {m.phone} }
+ {!m.email && !m.phone && {m.title || '-'} · {m.department_path || m.department_id || '-'} }
@@ -778,6 +829,18 @@ function OrgTab({ tenant }: { tenant: any }) {
{/* SSO status is now derived from per-channel toggles — no global switch */}
+ {/* Company URL */}
+ {tenant?.subdomain_prefix && (
+
+
+ {t('enterprise.org.companyUrl', 'Company URL')}
+
+
+ {tenant.effective_base_url}
+
+
+ )}
+
{/* 1. Identity Providers Section */}
@@ -785,7 +848,7 @@ function OrgTab({ tenant }: { tenant: any }) {
{t('enterprise.identity.title', 'Organization & Directory Sync')}
- Configure enterprise directory synchronization and Identity Provider settings.
+ {t('enterprise.identity.description', 'Configure enterprise directory synchronization and Identity Provider settings.')}
@@ -810,7 +873,7 @@ function OrgTab({ tenant }: { tenant: any }) {
{existingProvider ? (
- Active
+ {t('enterprise.identity.statusActive', 'Active')}
{existingProvider.last_synced_at && (
Synced: {new Date(existingProvider.last_synced_at).toLocaleDateString()}
@@ -818,7 +881,7 @@ function OrgTab({ tenant }: { tenant: any }) {
)}
) : (
-
Not configured
+
{t('enterprise.identity.statusNotConfigured', 'Not configured')}
)}
▼
diff --git a/frontend/src/pages/Layout.tsx b/frontend/src/pages/Layout.tsx
index 38de02c9..206c3a87 100644
--- a/frontend/src/pages/Layout.tsx
+++ b/frontend/src/pages/Layout.tsx
@@ -3,6 +3,7 @@ import { createPortal } from 'react-dom';
import { Outlet, NavLink, useNavigate } from 'react-router-dom';
import { useTranslation } from 'react-i18next';
import { useQuery, useQueryClient } from '@tanstack/react-query';
+import { Building2 } from 'lucide-react';
import { useAuthStore } from '../stores';
import { agentApi, tenantApi, authApi } from '../services/api';
diff --git a/frontend/src/pages/Login.tsx b/frontend/src/pages/Login.tsx
index 99a31811..3c14a4c6 100644
--- a/frontend/src/pages/Login.tsx
+++ b/frontend/src/pages/Login.tsx
@@ -1,6 +1,7 @@
import { useState, useEffect } from 'react';
import { Link, useNavigate, useSearchParams } from 'react-router-dom';
import { useTranslation } from 'react-i18next';
+import { Bot, Brain, Building2, Globe, AlertTriangle } from 'lucide-react';
import { useAuthStore } from '../stores';
import { authApi, tenantApi, fetchJson } from '../services/api';
import type { TokenResponse } from '../types';
@@ -262,6 +263,7 @@ export default function Login() {
feishu: { label: 'Feishu', icon: '/feishu.png' },
dingtalk: { label: 'DingTalk', icon: '/dingtalk.png' },
wecom: { label: 'WeCom', icon: '/wecom.png' },
+ oauth2: { label: 'SSO', icon: '' },
};
return (
@@ -281,21 +283,21 @@ export default function Login() {
-
🤖
+
{t('login.hero.features.multiAgent.title')}
{t('login.hero.features.multiAgent.description')}
-
🧠
+
{t('login.hero.features.persistentMemory.title')}
{t('login.hero.features.persistentMemory.description')}
-
🏛️
+
{t('login.hero.features.agentPlaza.title')}
{t('login.hero.features.agentPlaza.description')}
@@ -316,7 +318,7 @@ export default function Login() {
background: 'var(--bg-secondary)', border: '1px solid var(--border-subtle)',
zIndex: 101,
}} onClick={toggleLang}>
- 🌐
+
@@ -342,7 +344,7 @@ export default function Login() {
{error && (
)}
@@ -365,19 +367,7 @@ export default function Login() {
{tenant && tenant.sso_enabled && !isRegister && (
-
-
- {tenant.name}
-
-
- {t('auth.ssoNotice', 'Enterprise SSO is enabled for this domain.')}
-
-
-
+ {/* SSO 提示框已隐藏 - 2026-03-31 */}
{ssoLoading && (
{t('auth.ssoLoading', 'Loading SSO providers...')}
@@ -387,7 +377,7 @@ export default function Login() {
{!ssoLoading && ssoProviders.length > 0 && (
{ssoProviders.map(p => {
- const meta = ssoMeta[p.provider_type] || { label: p.name || p.provider_type, icon: '' };
+ const baseMeta = ssoMeta[p.provider_type] || { label: p.name || p.provider_type, icon: '' }; const meta = { ...baseMeta, label: p.name || baseMeta.label };
return (
= {
- text: '💬',
+ text: '',
notify: '·',
consult: '?',
task_delegate: '+',
diff --git a/frontend/src/pages/SSOEntry.tsx b/frontend/src/pages/SSOEntry.tsx
index 619706ec..0ed7a11b 100644
--- a/frontend/src/pages/SSOEntry.tsx
+++ b/frontend/src/pages/SSOEntry.tsx
@@ -2,6 +2,7 @@ import { useEffect, useState } from 'react';
import { useNavigate, useSearchParams } from 'react-router-dom';
import { useAuthStore } from '../stores';
import { fetchJson } from '../services/api';
+import { AlertTriangle } from 'lucide-react';
export default function SSOEntry() {
const [searchParams] = useSearchParams();
@@ -116,7 +117,7 @@ export default function SSOEntry() {
if (error) {
return (
-
⚠ Error
+
Error
{error}
);
diff --git a/frontend/src/pages/UserManagement.tsx b/frontend/src/pages/UserManagement.tsx
index b40074a4..f134312b 100644
--- a/frontend/src/pages/UserManagement.tsx
+++ b/frontend/src/pages/UserManagement.tsx
@@ -6,6 +6,7 @@ import { useTranslation } from 'react-i18next';
import { useAuthStore } from '../stores';
import LinearCopyButton from '../components/LinearCopyButton';
+import { Pencil } from "lucide-react";
interface UserInfo {
id: string;
username: string;
@@ -19,6 +20,7 @@ interface UserInfo {
quota_max_agents: number;
quota_agent_ttl_hours: number;
agents_count: number;
+ primary_mobile?: string;
feishu_open_id?: string;
created_at?: string;
source?: string;
@@ -62,6 +64,7 @@ export default function UserManagement() {
const [saving, setSaving] = useState(false);
const [toast, setToast] = useState('');
const [changingRoleUserId, setChangingRoleUserId] = useState(null);
+ const [editingProfileUser, setEditingProfileUser] = useState(null);
// Invite modal state
const [showInviteModal, setShowInviteModal] = useState(false);
@@ -107,12 +110,12 @@ export default function UserManagement() {
method: 'PATCH',
body: JSON.stringify(editForm),
});
- setToast(isChinese ? '✅ 配额已更新' : '✅ Quota updated');
+ setToast(isChinese ? '配额已更新' : 'Quota updated');
setTimeout(() => setToast(''), 2000);
setEditingUserId(null);
loadUsers();
} catch (e: any) {
- setToast(`❌ ${e.message}`);
+ setToast(`Error: ${e.message}`);
setTimeout(() => setToast(''), 3000);
}
setSaving(false);
@@ -224,13 +227,21 @@ export default function UserManagement() {
{toast && (
{toast}
)}
+ {editingProfileUser && (
+ setEditingProfileUser(null)}
+ onUpdated={() => { loadUsers(); setEditingProfileUser(null); }}
+ />
+ )}
+
{loading ? (
{t('common.loading')}...
@@ -270,7 +281,7 @@ export default function UserManagement() {
{/* Header */}
@@ -296,7 +307,7 @@ export default function UserManagement() {
{paged.map(user => (
@@ -358,14 +369,23 @@ export default function UserManagement() {
{user.quota_agent_ttl_hours}h
+
+
setEditingProfileUser(user)}
+ >
+ {isChinese ? '编辑信息' : 'Edit Info'}
+
editingUserId === user.id ? setEditingUserId(null) : startEdit(user)}
>
- {editingUserId === user.id ? t('common.cancel') : `✏️ ${t('common.edit')}`}
+ {editingUserId === user.id ? t('common.cancel') : <> {isChinese ? '配额' : 'Quota'}>}
+
{/* Inline edit form */}
@@ -519,3 +539,205 @@ export default function UserManagement() {
);
}
+
+
+// ─── Edit User Profile Modal ───────────────────────────────
+interface EditUserModalProps {
+ user: UserInfo;
+ onClose: () => void;
+ onUpdated: () => void;
+}
+
+function EditUserModal({ user, onClose, onUpdated }: EditUserModalProps) {
+ const { t, i18n } = useTranslation();
+ const isChinese = i18n.language?.startsWith('zh');
+ const { user: currentUser } = useAuthStore();
+
+ const [form, setForm] = useState({
+ display_name: user.display_name || '',
+ email: user.email || '',
+ primary_mobile: user.primary_mobile || '',
+ is_active: user.is_active,
+ new_password: '',
+ });
+ const [saving, setSaving] = useState(false);
+ const [error, setError] = useState('');
+
+ const handleSave = async () => {
+ if (!form.display_name.trim()) {
+ setError(isChinese ? '显示名称不能为空' : 'Display name is required');
+ return;
+ }
+ setSaving(true);
+ setError('');
+ try {
+ const payload: any = {
+ display_name: form.display_name.trim(),
+ email: form.email.trim(),
+ primary_mobile: form.primary_mobile.trim() || null,
+ is_active: form.is_active,
+ };
+ if (form.new_password.trim()) {
+ payload.new_password = form.new_password.trim();
+ }
+ await fetchJson(`/users/${user.id}/profile`, {
+ method: 'PATCH',
+ body: JSON.stringify(payload),
+ });
+ onUpdated();
+ onClose();
+ } catch (e: any) {
+ const detail = (() => {
+ try { return JSON.parse(e.message)?.detail; }
+ catch { return e.message; }
+ })();
+ setError(detail || (isChinese ? '保存失败' : 'Save failed'));
+ }
+ setSaving(false);
+ };
+
+ const isReadOnly = currentUser?.role === 'org_admin' && user.role === 'platform_admin';
+
+ return (
+
+
e.stopPropagation()}>
+
+
+ {isChinese ? '编辑用户信息' : 'Edit User'}
+
+
+
+
+
+
+
+
+
+ {isChinese ? '用户名' : 'Username'}: @{user.username}
+
+
+
+
+
+ {isChinese ? '显示名称' : 'Display Name'} *
+
+ setForm({ ...form, display_name: e.target.value })}
+ disabled={isReadOnly}
+ style={{ fontSize: '13px' }}
+ />
+
+
+
+
+ {isChinese ? '邮箱' : 'Email'}
+
+ setForm({ ...form, email: e.target.value })}
+ disabled={isReadOnly}
+ style={{ fontSize: '13px' }}
+ />
+
+
+
+
+ {isChinese ? '手机号' : 'Mobile'}
+
+ setForm({ ...form, primary_mobile: e.target.value })}
+ disabled={isReadOnly}
+ placeholder={isChinese ? '可选' : 'Optional'}
+ style={{ fontSize: '13px' }}
+ />
+
+
+
+
+ {t('users.newPassword', isChinese ? '新密码' : 'New Password')}
+
+
setForm({ ...form, new_password: e.target.value })}
+ disabled={isReadOnly}
+ placeholder={t('users.newPasswordPlaceholder', isChinese ? '留空则不修改密码' : 'Leave blank to keep current password')}
+ style={{ fontSize: '13px' }}
+ autoComplete="new-password"
+ />
+
+ {t('users.newPasswordHint', isChinese ? '可选。填写后将重置用户密码。' : 'Optional. If provided, user password will be reset.')}
+
+
+
+ {user.role !== 'platform_admin' && (
+
+
+
+ {isChinese ? '账号状态' : 'Account Status'}
+
+
+ {isChinese ? '禁用后用户无法登录' : 'Disabled users cannot log in'}
+
+
+
+ setForm({ ...form, is_active: e.target.checked })}
+ style={{ opacity: 0, width: 0, height: 0 }}
+ />
+
+
+
+
+
+ )}
+
+
+ {error && (
+
{error}
+ )}
+
+
+
+ {t('common.cancel', 'Cancel')}
+
+ {!isReadOnly && (
+
+ {saving ? t('common.loading', 'Loading...') : t('common.save', 'Save')}
+
+ )}
+
+
+
+ );
+}
diff --git a/frontend/src/services/api.ts b/frontend/src/services/api.ts
index 1519752b..70279827 100644
--- a/frontend/src/services/api.ts
+++ b/frontend/src/services/api.ts
@@ -210,6 +210,9 @@ export const adminApi = {
updateCompany: (id: string, data: any) =>
request
(`/tenants/${id}`, { method: 'PUT', body: JSON.stringify(data) }),
+ deleteCompany: (id: string) =>
+ request(`/admin/companies/${id}`, { method: 'DELETE' }),
+
toggleCompany: (id: string) =>
request(`/admin/companies/${id}/toggle`, { method: 'PUT' }),
@@ -218,6 +221,12 @@ export const adminApi = {
updatePlatformSettings: (data: any) =>
request('/admin/platform-settings', { method: 'PUT', body: JSON.stringify(data) }),
+
+ listCompanyCodes: (companyId: string) =>
+ request(`/admin/companies/${companyId}/invitation-codes`),
+
+ createCompanyCode: (companyId: string) =>
+ request(`/admin/companies/${companyId}/invitation-codes`, { method: 'POST' }),
};
// ─── Agents ───────────────────────────────────────────