diff --git a/.env.example b/.env.example index a8065f8..1361226 100644 --- a/.env.example +++ b/.env.example @@ -252,3 +252,13 @@ VITE_TINYMCE_API_KEY=no-api-key # # OpenAI (for AI-powered features) # OPENAI_API_KEY= + +# ============================================================================= +# OPTIONAL — OAuth Codex (ChatGPT Subscription Authentication) +# ============================================================================= +# Enable OAuth Codex to allow users to authenticate with their ChatGPT +# Plus/Pro subscription instead of using a traditional API key. +# This provides access to GPT-5.x models via chatgpt.com/backend-api/codex. +# +# CODEX_ENABLED=true +# CODEX_CLIENT_ID=app_EMoamEEZ73f0CkXaXp7hrann diff --git a/.env.grupomirandas.example b/.env.grupomirandas.example new file mode 100644 index 0000000..fd2f14a --- /dev/null +++ b/.env.grupomirandas.example @@ -0,0 +1,44 @@ +# ============================================================================= +# Evo CRM — Grupo Mirandas stack (docker-compose.grupomirandas.yml) +# ============================================================================= +# Copy this file to .env and fill in every value before running +# docker stack deploy -c docker-compose.grupomirandas.yml evocrm +# or deploying from Portainer with an env-file. +# +# Every variable listed here is substituted at deploy time by Docker Swarm +# (Portainer injects them as stack environment variables). Never commit the +# real .env — it is already gitignored. 
+# ============================================================================= + +# ----------------------------------------------------------------------------- +# Shared application secrets (rotate on any suspected leak) +# ----------------------------------------------------------------------------- +# Rails cookie / session secret — 128 hex chars recommended +SECRET_KEY_BASE= + +# JWT signing key shared between auth / crm / core — 64 hex chars +JWT_SECRET_KEY= + +# Doorkeeper (Rails OAuth2 provider) signing key — 64 hex chars +DOORKEEPER_JWT_SECRET_KEY= + +# Fernet key used by core + processor to encrypt API keys and OAuth data. +# Must be a 32-byte url-safe base64 value. Generate with: +# python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())" +ENCRYPTION_KEY= + +# Service-to-service bearer used by processor <-> crm and bot-runtime <-> crm +EVOAI_CRM_API_TOKEN= + +# Bot runtime HMAC/bearer secret +BOT_RUNTIME_SECRET= + +# ----------------------------------------------------------------------------- +# PostgreSQL (pgvector container) +# ----------------------------------------------------------------------------- +POSTGRES_PASSWORD= + +# ----------------------------------------------------------------------------- +# SMTP (outgoing mail from auth / crm) +# ----------------------------------------------------------------------------- +SMTP_PASSWORD= diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml new file mode 100644 index 0000000..5fa6938 --- /dev/null +++ b/.github/workflows/build-test.yml @@ -0,0 +1,79 @@ +name: Build Test Images + +on: + push: + branches: [test/all-fixes] + workflow_dispatch: + +env: + REGISTRY: ghcr.io + +jobs: + build-images: + name: Build ${{ matrix.service }} + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + strategy: + fail-fast: false + matrix: + include: + - service: evo-auth-service-community + context: ./evo-auth-service-community + 
dockerfile: ./evo-auth-service-community/Dockerfile + image: evo-auth + - service: evo-ai-crm-community + context: ./evo-ai-crm-community + dockerfile: ./evo-ai-crm-community/docker/Dockerfile + image: evo-crm + - service: evo-ai-frontend-community + context: ./evo-ai-frontend-community + dockerfile: ./evo-ai-frontend-community/Dockerfile + image: evo-frontend + - service: evo-ai-processor-community + context: ./evo-ai-processor-community + dockerfile: ./evo-ai-processor-community/Dockerfile + image: evo-processor + - service: evo-crm-gateway + context: ./nginx + dockerfile: ./nginx/Dockerfile + image: evo-gateway + - service: evo-ai-core-service-community + context: ./evo-ai-core-service-community + dockerfile: ./evo-ai-core-service-community/Dockerfile + image: evo-core + + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Set lowercase owner + id: owner + run: echo "owner=$(echo '${{ github.repository_owner }}' | tr '[:upper:]' '[:lower:]')" >> "$GITHUB_OUTPUT" + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push + uses: docker/build-push-action@v6 + with: + context: ${{ matrix.context }} + file: ${{ matrix.dockerfile }} + push: true + tags: | + ${{ env.REGISTRY }}/${{ steps.owner.outputs.owner }}/${{ matrix.image }}:test + ${{ env.REGISTRY }}/${{ steps.owner.outputs.owner }}/${{ matrix.image }}:${{ github.sha }} + cache-from: type=gha + cache-to: type=gha,mode=max + build-args: | + RAILS_ENV=production + RAILS_SERVE_STATIC_FILES=true diff --git a/.gitmodules b/.gitmodules index 7d4aa80..7e4c2e4 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,24 +1,24 @@ [submodule "evo-auth-service-community"] path = evo-auth-service-community - url = git@github.com:EvolutionAPI/evo-auth-service-community.git + url = 
https://github.com/EvolutionAPI/evo-auth-service-community.git [submodule "evo-ai-crm-community"] path = evo-ai-crm-community - url = git@github.com:EvolutionAPI/evo-ai-crm-community.git + url = https://github.com/NeritonDias/evo-ai-crm-community.git [submodule "evo-ai-frontend-community"] path = evo-ai-frontend-community - url = git@github.com:EvolutionAPI/evo-ai-frontend-community.git + url = https://github.com/NeritonDias/evo-ai-frontend-community.git [submodule "evo-ai-processor-community"] path = evo-ai-processor-community - url = git@github.com:EvolutionAPI/evo-ai-processor-community.git + url = https://github.com/NeritonDias/evo-ai-processor-community.git [submodule "evo-ai-core-service-community"] path = evo-ai-core-service-community - url = git@github.com:EvolutionAPI/evo-ai-core-service-community.git + url = https://github.com/EvolutionAPI/evo-ai-core-service-community.git [submodule "evolution-api"] path = evolution-api - url = git@github.com:EvolutionAPI/evolution-api.git + url = https://github.com/EvolutionAPI/evolution-api.git [submodule "evolution-go"] path = evolution-go - url = git@github.com:EvolutionAPI/evolution-go.git + url = https://github.com/EvolutionAPI/evolution-go.git [submodule "evo-bot-runtime"] path = evo-bot-runtime - url = git@github.com:EvolutionAPI/evo-bot-runtime.git + url = https://github.com/EvolutionAPI/evo-bot-runtime.git diff --git a/Makefile b/Makefile index 3c7c73f..46fc1e0 100644 --- a/Makefile +++ b/Makefile @@ -100,12 +100,12 @@ seed: seed-auth seed-crm ## Run all seeds (auth first, then CRM) seed-auth: ## Seed the Auth service (creates default user) @echo "$(CYAN)Seeding Auth service...$(RESET)" - docker compose run --rm evo-auth bash -c "bundle exec rails db:prepare && bundle exec rails db:seed" + docker compose run --rm evo-auth sh -c "bundle exec rails db:prepare && bundle exec rails db:seed" @echo "$(GREEN)Auth service seeded.$(RESET)" seed-crm: ## Seed the CRM service (creates default inbox) @echo "$(CYAN)Seeding 
CRM service...$(RESET)" - docker compose run --rm evo-crm bash -c "bundle exec rails db:prepare && bundle exec rails db:seed" + docker compose run --rm evo-crm sh -c "bundle exec rails db:prepare && bundle exec rails db:seed" @echo "$(GREEN)CRM service seeded.$(RESET)" ## —— Shell Access ————————————————————————————————————————————————————————————— diff --git a/docker-compose.grupomirandas.yml b/docker-compose.grupomirandas.yml new file mode 100644 index 0000000..2aae3ce --- /dev/null +++ b/docker-compose.grupomirandas.yml @@ -0,0 +1,497 @@ +version: "3.7" +services: + +## --------------------------- ORION --------------------------- ## + + evocrm_gateway: + image: ghcr.io/neritondias/evo-gateway:test + + networks: + - gmnet ## Nome da rede interna + + environment: + - AUTH_UPSTREAM=evocrm_auth:3001 + - CRM_UPSTREAM=evocrm_crm:3000 + - CORE_UPSTREAM=evocrm_core:5555 + - PROCESSOR_UPSTREAM=evocrm_processor:8000 + - BOT_RUNTIME_UPSTREAM=evocrm_bot_runtime:8080 + + deploy: + placement: + constraints: + - node.role == manager + resources: + limits: + cpus: "1" + memory: 1024M + labels: + - traefik.enable=1 + - traefik.docker.network=gmnet ## Nome da rede interna + - traefik.http.routers.evocrm_gateway.rule=Host(`api-crm.grupomirandas.com.br`) ## Domínio API (gateway) + - traefik.http.routers.evocrm_gateway.entrypoints=websecure + - traefik.http.routers.evocrm_gateway.priority=1 + - traefik.http.routers.evocrm_gateway.tls.certresolver=letsencryptresolver + - traefik.http.routers.evocrm_gateway.service=evocrm_gateway + - traefik.http.services.evocrm_gateway.loadbalancer.server.port=3030 + - traefik.http.services.evocrm_gateway.loadbalancer.passHostHeader=true + +## --------------------------- ORION --------------------------- ## + + evocrm_auth: + image: ghcr.io/neritondias/evo-auth:test + command: sh -c "bundle exec rails db:migrate 2>&1 || echo 'Migration had errors, continuing...'; bundle exec rails s -p 3001 -b 0.0.0.0" + + networks: + - gmnet ## Nome da rede interna 
+ + environment: + ## ⚙️ Rails (ambiente) + - RAILS_ENV=production + - RAILS_MAX_THREADS=5 + + ## 🔐 Segredos e tokens + - SECRET_KEY_BASE=${SECRET_KEY_BASE} + - JWT_SECRET_KEY=${JWT_SECRET_KEY} + - EVOAI_CRM_API_TOKEN=${EVOAI_CRM_API_TOKEN} + + ## 🗄️ PostgreSQL + - POSTGRES_HOST=pgvector + - POSTGRES_PORT=5432 + - POSTGRES_USERNAME=postgres + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_DATABASE=evocrm + - POSTGRES_SSLMODE=disable + + ## 🧊 Redis + - REDIS_URL=redis://evocrm_redis:6379/1 + + ## 🌐 URLs públicas e CORS + - FRONTEND_URL=https://crm.grupomirandas.com.br + - BACKEND_URL=https://api-crm.grupomirandas.com.br + - CORS_ORIGINS=https://crm.grupomirandas.com.br,https://api-crm.grupomirandas.com.br + + ## ✉️ E-mail (Mailer + SMTP) + - SMTP_DOMAIN=grupomirandas.com.br + - MAILER_SENDER_EMAIL=notify@grupomirandas.com.br + - SMTP_USERNAME=notify@grupomirandas.com.br + - SMTP_PASSWORD=${SMTP_PASSWORD} + - SMTP_ADDRESS=mail.grupomirandas.com.br + - SMTP_PORT=465 + - SMTP_AUTHENTICATION=plain + - SMTP_ENABLE_STARTTLS_AUTO=true + + ## 🛂 Doorkeeper (OAuth / JWT) + - DOORKEEPER_JWT_SECRET_KEY=${DOORKEEPER_JWT_SECRET_KEY} + - DOORKEEPER_JWT_ALGORITHM=hs256 + - DOORKEEPER_JWT_ISS=evo-auth-service + + ## 🔑 MFA e filas + - MFA_ISSUER=EvoCRM + - SIDEKIQ_CONCURRENCY=10 + - ACTIVE_STORAGE_SERVICE=local + + deploy: + placement: + constraints: + - node.role == manager + resources: + limits: + cpus: "1" + memory: 1024M + +## --------------------------- ORION --------------------------- ## + + evocrm_auth_sidekiq: + image: ghcr.io/neritondias/evo-auth:test + command: ["bundle", "exec", "sidekiq", "-C", "config/sidekiq.yml"] + + networks: + - gmnet ## Nome da rede interna + + environment: + ## ⚙️ Rails (ambiente) + - RAILS_ENV=production + + ## 🔐 Segredos e tokens + - SECRET_KEY_BASE=${SECRET_KEY_BASE} + - JWT_SECRET_KEY=${JWT_SECRET_KEY} + - EVOAI_CRM_API_TOKEN=${EVOAI_CRM_API_TOKEN} + + ## 🗄️ PostgreSQL + - POSTGRES_HOST=pgvector + - POSTGRES_PORT=5432 + - 
POSTGRES_USERNAME=postgres + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_DATABASE=evocrm + - POSTGRES_SSLMODE=disable + + ## 🧊 Redis + - REDIS_URL=redis://evocrm_redis:6379/1 + + ## 🌐 CORS + - CORS_ORIGINS=https://crm.grupomirandas.com.br,https://api-crm.grupomirandas.com.br + + ## ✉️ E-mail (Mailer + SMTP) + - SMTP_DOMAIN=grupomirandas.com.br + - MAILER_SENDER_EMAIL=notify@grupomirandas.com.br + - SMTP_USERNAME=notify@grupomirandas.com.br + - SMTP_PASSWORD=${SMTP_PASSWORD} + - SMTP_ADDRESS=mail.grupomirandas.com.br + - SMTP_PORT=465 + - SMTP_AUTHENTICATION=plain + - SMTP_ENABLE_STARTTLS_AUTO=true + + ## 🛂 Doorkeeper (OAuth / JWT) + - DOORKEEPER_JWT_SECRET_KEY=${DOORKEEPER_JWT_SECRET_KEY} + - DOORKEEPER_JWT_ALGORITHM=hs256 + - DOORKEEPER_JWT_ISS=evo-auth-service + + ## 🔑 MFA e filas + - MFA_ISSUER=EvoCRM + - SIDEKIQ_CONCURRENCY=10 + - ACTIVE_STORAGE_SERVICE=local + + healthcheck: + disable: true + + deploy: + placement: + constraints: + - node.role == manager + resources: + limits: + cpus: "1" + memory: 1024M + +## --------------------------- ORION --------------------------- ## + + evocrm_crm: + image: ghcr.io/neritondias/evo-crm:test + command: sh -c "until wget -qO- http://evocrm_auth:3001/health >/dev/null 2>&1; do echo 'Waiting for auth...'; sleep 5; done; bundle exec rails db:migrate 2>&1 || echo 'Migration had errors, continuing...'; bundle exec rails s -p 3000 -b 0.0.0.0" + + networks: + - gmnet ## Nome da rede interna + + environment: + ## ⚙️ Rails (ambiente e logs) + - RAILS_ENV=production + - RAILS_SERVE_STATIC_FILES=true + - RAILS_LOG_TO_STDOUT=true + + ## 🔐 Segredos e tokens + - SECRET_KEY_BASE=${SECRET_KEY_BASE} + - JWT_SECRET_KEY=${JWT_SECRET_KEY} + - EVOAI_CRM_API_TOKEN=${EVOAI_CRM_API_TOKEN} + + ## 🗄️ PostgreSQL + - POSTGRES_HOST=pgvector + - POSTGRES_PORT=5432 + - POSTGRES_USERNAME=postgres + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_DATABASE=evocrm + - POSTGRES_SSLMODE=disable + + ## 🧊 Redis + - 
REDIS_URL=redis://evocrm_redis:6379/1 + + ## 🔗 Serviços internos (Auth + Core) + - EVO_AUTH_SERVICE_URL=http://evocrm_auth:3001 + - EVO_AI_CORE_SERVICE_URL=http://evocrm_core:5555 + + ## 🌐 URLs públicas e CORS + - BACKEND_URL=https://api-crm.grupomirandas.com.br + - FRONTEND_URL=https://crm.grupomirandas.com.br + - CORS_ORIGINS=https://crm.grupomirandas.com.br,https://api-crm.grupomirandas.com.br + + ## 📊 Telemetria e logs + - DISABLE_TELEMETRY=true + - LOG_LEVEL=info + + ## ✨ Funcionalidades da aplicação + - ENABLE_ACCOUNT_SIGNUP=true + - ENABLE_PUSH_RELAY_SERVER=true + - ENABLE_INBOX_EVENTS=true + + ## 🤖 Bot runtime + - BOT_RUNTIME_URL=http://evocrm_bot_runtime:8080 + - BOT_RUNTIME_SECRET=${BOT_RUNTIME_SECRET} + - BOT_RUNTIME_POSTBACK_BASE_URL=http://evocrm_crm:3000 + + deploy: + placement: + constraints: + - node.role == manager + resources: + limits: + cpus: "1" + memory: 1024M + +## --------------------------- ORION --------------------------- ## + + evocrm_crm_sidekiq: + image: ghcr.io/neritondias/evo-crm:test + command: ["bundle", "exec", "sidekiq", "-C", "config/sidekiq.yml"] + + networks: + - gmnet ## Nome da rede interna + + environment: + ## ⚙️ Rails (ambiente) + - RAILS_ENV=production + + ## 🔐 Segredos e tokens + - SECRET_KEY_BASE=${SECRET_KEY_BASE} + - JWT_SECRET_KEY=${JWT_SECRET_KEY} + - EVOAI_CRM_API_TOKEN=${EVOAI_CRM_API_TOKEN} + + ## 🗄️ PostgreSQL + - POSTGRES_HOST=pgvector + - POSTGRES_PORT=5432 + - POSTGRES_USERNAME=postgres + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_DATABASE=evocrm + - POSTGRES_SSLMODE=disable + + ## 🧊 Redis + - REDIS_URL=redis://evocrm_redis:6379/1 + + ## 🔗 Serviços internos (Auth + Core) + - EVO_AUTH_SERVICE_URL=http://evocrm_auth:3001 + - EVO_AI_CORE_SERVICE_URL=http://evocrm_core:5555 + + ## 🌐 CORS + - CORS_ORIGINS=https://crm.grupomirandas.com.br,https://api-crm.grupomirandas.com.br + + ## 🤖 Bot runtime + - BOT_RUNTIME_URL=http://evocrm_bot_runtime:8080 + - BOT_RUNTIME_SECRET=${BOT_RUNTIME_SECRET} + - 
BOT_RUNTIME_POSTBACK_BASE_URL=http://evocrm_crm:3000 + + deploy: + placement: + constraints: + - node.role == manager + resources: + limits: + cpus: "1" + memory: 1024M + +## --------------------------- ORION --------------------------- ## + + evocrm_core: + image: ghcr.io/neritondias/evo-core:test + + networks: + - gmnet ## Nome da rede interna + + environment: + ## 🗄️ PostgreSQL (conexão) + - DB_HOST=pgvector + - DB_PORT=5432 + - DB_USER=postgres + - DB_PASSWORD=${POSTGRES_PASSWORD} + - DB_NAME=evocrm + - DB_SSLMODE=disable + + ## 📈 Pool de conexões + - DB_MAX_IDLE_CONNS=10 + - DB_MAX_OPEN_CONNS=100 + - DB_CONN_MAX_LIFETIME=1h + - DB_CONN_MAX_IDLE_TIME=30m + + ## 🌐 API (porta) + - PORT=5555 + + ## 🔐 Segredos e JWT + - SECRET_KEY_BASE=${SECRET_KEY_BASE} + - JWT_SECRET_KEY=${JWT_SECRET_KEY} + - JWT_ALGORITHM=HS256 + - ENCRYPTION_KEY=${ENCRYPTION_KEY} + + ## 🔗 Serviços internos + - EVOLUTION_BASE_URL=http://evocrm_crm:3000 + - EVO_AUTH_BASE_URL=http://evocrm_auth:3001 + - AI_PROCESSOR_URL=http://evocrm_processor:8000 + - AI_PROCESSOR_VERSION=v1 + + deploy: + placement: + constraints: + - node.role == manager + resources: + limits: + cpus: "1" + memory: 1024M + +## --------------------------- ORION --------------------------- ## + + evocrm_processor: + image: ghcr.io/neritondias/evo-processor:test + command: sh -c "alembic upgrade head 2>&1 || echo 'Alembic migration had errors, continuing...'; python -m scripts.run_seeders; uvicorn src.main:app --host $$HOST --port $$PORT" + + volumes: + - evocrm_processor_logs:/app/logs + + networks: + - gmnet ## Nome da rede interna + + environment: + ## 🗄️ PostgreSQL + - POSTGRES_CONNECTION_STRING=postgresql://postgres:${POSTGRES_PASSWORD}@pgvector:5432/evocrm?sslmode=disable + + ## 🧊 Redis + - REDIS_HOST=evocrm_redis + - REDIS_PORT=6379 + - REDIS_PASSWORD= + - REDIS_SSL=false + - REDIS_DB=0 + - REDIS_KEY_PREFIX=a2a_ + - REDIS_TTL=3600 + + ## 🌐 Uvicorn (host / porta) + - HOST=0.0.0.0 + - PORT=8000 + + ## 🐛 Debug e segredos + - 
DEBUG=false + - SECRET_KEY_BASE=${SECRET_KEY_BASE} + - ENCRYPTION_KEY=${ENCRYPTION_KEY} + - EVOAI_CRM_API_TOKEN=${EVOAI_CRM_API_TOKEN} + + ## 🔗 Integração CRM e Core + - EVO_AI_CRM_URL=http://evocrm_crm:3000 + - CORE_SERVICE_URL=http://evocrm_core:5555/api/v1 + + ## 🌍 URLs públicas da API + - APP_URL=https://api-crm.grupomirandas.com.br + - API_URL=https://api-crm.grupomirandas.com.br + + ## 📋 Metadados da API (OpenAPI) + - API_TITLE=Agent Processor Community + - API_DESCRIPTION=Agent Processor Community for Evo AI + - API_VERSION=1.0.0 + - ORGANIZATION_NAME=Evo CRM + + ## 🗃️ Cache de tools + - TOOLS_CACHE_ENABLED=true + - TOOLS_CACHE_TTL=3600 + + deploy: + placement: + constraints: + - node.role == manager + resources: + limits: + cpus: "1" + memory: 1024M + +## --------------------------- ORION --------------------------- ## + + evocrm_bot_runtime: + image: evoapicloud/evo-bot-runtime:latest + + networks: + - gmnet ## Nome da rede interna + + environment: + ## 🌐 Rede (listen) + - LISTEN_ADDR=0.0.0.0:8080 + + ## 🧊 Redis + - REDIS_URL=redis://evocrm_redis:6379/1 + + ## 🤖 Processor e segurança + - AI_PROCESSOR_URL=http://evocrm_processor:8000 + - BOT_RUNTIME_SECRET=${BOT_RUNTIME_SECRET} + - AI_CALL_TIMEOUT_SECONDS=30 + + deploy: + placement: + constraints: + - node.role == manager + resources: + limits: + cpus: "1" + memory: 1024M + +## --------------------------- ORION --------------------------- ## + + evocrm_frontend: + image: ghcr.io/neritondias/evo-frontend:test + + networks: + - gmnet ## Nome da rede interna + + environment: + ## ⚙️ Vite (ambiente) + - VITE_APP_ENV=production + + ## 🌐 URLs da API (runtime — necessita suporte no entrypoint da imagem) + - VITE_API_URL=https://api-crm.grupomirandas.com.br + - VITE_AUTH_API_URL=https://api-crm.grupomirandas.com.br + - VITE_EVOAI_API_URL=https://api-crm.grupomirandas.com.br + - VITE_AGENT_PROCESSOR_URL=https://api-crm.grupomirandas.com.br + - VITE_WS_URL=wss://api-crm.grupomirandas.com.br/cable + + deploy: + 
placement: + constraints: + - node.role == manager + resources: + limits: + cpus: "1" + memory: 1024M + labels: + - traefik.enable=1 + - traefik.docker.network=gmnet ## Nome da rede interna + - traefik.http.routers.evocrm_frontend.rule=Host(`crm.grupomirandas.com.br`) ## Domínio frontend (React) + - traefik.http.routers.evocrm_frontend.entrypoints=websecure + - traefik.http.routers.evocrm_frontend.priority=1 + - traefik.http.routers.evocrm_frontend.tls.certresolver=letsencryptresolver + - traefik.http.routers.evocrm_frontend.service=evocrm_frontend + - traefik.http.services.evocrm_frontend.loadbalancer.server.port=80 + - traefik.http.services.evocrm_frontend.loadbalancer.passHostHeader=true + +## --------------------------- ORION --------------------------- ## + + evocrm_redis: + image: redis:latest ## Versão do Redis + command: [ + "redis-server", + "--appendonly", + "yes", + "--port", + "6379" + ] + + volumes: + - evocrm_redis:/data + + networks: + - gmnet ## Nome da rede interna + ## Descomente as linhas abaixo para uso externo + #ports: + # - 6379:6379 + deploy: + placement: + constraints: + - node.role == manager + resources: + limits: + cpus: "1" + memory: 1024M + +## --------------------------- ORION --------------------------- ## + +volumes: + evocrm_processor_logs: + external: true + name: evocrm_processor_logs + evocrm_redis: + external: true + name: evocrm_redis + +networks: + gmnet: ## Nome da rede interna + external: true + name: gmnet ## Nome da rede interna diff --git a/docker-compose.prod-test.yaml b/docker-compose.prod-test.yaml index eb683e4..15cfb35 100644 --- a/docker-compose.prod-test.yaml +++ b/docker-compose.prod-test.yaml @@ -67,7 +67,7 @@ services: # --------------------------------------------------------------------------- evo_auth: image: evoapicloud/evo-auth-service-community:latest - command: bash -c "bundle exec rails db:prepare && bundle exec rails s -p 3001 -b 0.0.0.0" + command: sh -c "bundle exec rails db:prepare && bundle exec 
rails s -p 3001 -b 0.0.0.0" ports: - "3001:3001" environment: @@ -104,7 +104,7 @@ services: evo_auth_sidekiq: image: evoapicloud/evo-auth-service-community:latest - command: bash -c "bundle exec sidekiq" + command: sh -c "bundle exec sidekiq" environment: RAILS_ENV: production SECRET_KEY_BASE: "a]i9F#k2$$Lm7Nq0R!sT4uW6xZ8bD1eG3hJ5oP7rV9yAcE2fH4jM6pS8vX0zB3dK5nQ7tU9wY1" @@ -128,7 +128,7 @@ services: # --------------------------------------------------------------------------- evo_crm: image: evoapicloud/evo-ai-crm-community:latest - command: bash -c "bundle exec rails db:prepare && bundle exec rails s -p 3000 -b 0.0.0.0" + command: sh -c "bundle exec rails db:prepare && bundle exec rails s -p 3000 -b 0.0.0.0" ports: - "3000:3000" environment: diff --git a/docker-compose.swarm.yaml b/docker-compose.swarm.yaml index 76eb085..b970f10 100644 --- a/docker-compose.swarm.yaml +++ b/docker-compose.swarm.yaml @@ -54,7 +54,7 @@ services: # --------------------------------------------------------------------------- evo_auth: image: evoapicloud/evo-auth-service-community:latest - command: bash -c "bundle exec rails db:migrate 2>&1 || echo 'Migration had errors, continuing...'; bundle exec rails s -p 3001 -b 0.0.0.0" + command: sh -c "bundle exec rails db:migrate 2>&1 || echo 'Migration had errors, continuing...'; bundle exec rails s -p 3001 -b 0.0.0.0" environment: RAILS_ENV: production RAILS_MAX_THREADS: 5 diff --git a/docker-compose.yml b/docker-compose.yml index 7b0d11a..1aeb89f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -61,7 +61,7 @@ services: condition: service_healthy redis: condition: service_healthy - command: bash -c "bundle install && bundle exec rails db:prepare && bundle exec rails s -p 3001 -b 0.0.0.0" + command: sh -c "bundle install && bundle exec rails db:prepare && bundle exec rails s -p 3001 -b 0.0.0.0" healthcheck: test: ["CMD", "curl", "-f", "http://localhost:3001/health"] interval: 30s @@ -84,7 +84,7 @@ services: condition: service_healthy 
redis: condition: service_healthy - command: bash -c "bundle install && bundle exec sidekiq" + command: sh -c "bundle install && bundle exec sidekiq" # --------------------------------------------------------------------------- # CRM Service (Ruby / Rails) — Port 3000 diff --git a/docs/OAUTH-CODEX-en.md b/docs/OAUTH-CODEX-en.md new file mode 100644 index 0000000..627ded7 --- /dev/null +++ b/docs/OAUTH-CODEX-en.md @@ -0,0 +1,242 @@ +# OAuth Codex (OpenAI) — ChatGPT Subscription Authentication for Evo CRM + +## Overview + +This implementation adds **OpenAI OAuth Codex** as an alternative authentication method in Evo CRM, allowing users with **ChatGPT Plus** ($20/mo) or **ChatGPT Pro** ($200/mo) subscriptions to use GPT-5.x models directly, without needing a separate OpenAI API key. + +The approach is **hybrid**: OAuth Codex works alongside existing API keys. No current functionality is changed or removed. + +--- + +## Solution Architecture + +### Current flow (API Keys) + +``` +User pastes API key (sk-...) 
in frontend + -> Backend encrypts with Fernet (AES-128-CBC) + -> Saves to api_keys.encrypted_key in PostgreSQL + -> AgentBuilder decrypts and passes to LiteLlm(model, api_key) + -> LiteLLM routes to the correct provider +``` + +### New flow (OAuth Codex) + +``` +User selects "ChatGPT (OAuth)" in frontend + -> Clicks "Connect with ChatGPT" + -> Backend initiates device code flow with auth.openai.com + -> User receives code (e.g., ABCD-1234) + -> User visits auth.openai.com/codex/device and enters the code + -> Backend receives OAuth tokens, encrypts and saves to PostgreSQL + -> AgentBuilder detects auth_type='oauth_codex' + -> Decrypts tokens, checks validity, auto-refreshes if expired + -> Passes token as Bearer to chatgpt.com/backend-api/codex + -> Response returns through existing pipeline +``` + +### Technical Decision: openai/ prefix (not chatgpt/) + +Source code analysis of LiteLLM confirmed that the `chatgpt/` provider **ignores the `api_key` parameter** and always reads tokens from a global `auth.json` file. This is incompatible with multi-tenancy (each client has their own token). + +The solution uses the `openai/` provider with custom parameters: +- `api_base` = `https://chatgpt.com/backend-api/codex` +- `api_key` = tenant's OAuth token (used as Bearer) +- `extra_headers` = ChatGPT-Account-Id, originator + +Google ADK's `LiteLlm` passes `**kwargs` via `_additional_args` to `litellm.acompletion()`, confirmed in source code (SHA 7d13696c). Each tenant gets their own instance with zero shared global state. 
+ +--- + +## Available Models + +| Model | Minimum Plan | +|-------|-------------| +| chatgpt/gpt-5.4 | ChatGPT Plus | +| chatgpt/gpt-5.4-pro | ChatGPT Plus | +| chatgpt/gpt-5.3-codex | ChatGPT Plus | +| chatgpt/gpt-5.3-codex-spark | ChatGPT Pro | +| chatgpt/gpt-5.3-instant | ChatGPT Plus | +| chatgpt/gpt-5.2-codex | ChatGPT Plus | +| chatgpt/gpt-5.2 | ChatGPT Plus | +| chatgpt/gpt-5.1-codex-max | ChatGPT Pro | +| chatgpt/gpt-5.1-codex-mini | ChatGPT Plus | + +--- + +## Database Changes + +### Migration: `a1b2c3d4e5f6_add_oauth_codex_support` + +```sql +ALTER TABLE api_keys ADD COLUMN auth_type VARCHAR(20) DEFAULT 'api_key' NOT NULL; +ALTER TABLE api_keys ADD COLUMN oauth_data TEXT; +ALTER TABLE api_keys ALTER COLUMN encrypted_key DROP NOT NULL; + +-- Data integrity constraints +CHECK (auth_type IN ('api_key', 'oauth_codex')) +CHECK ((auth_type = 'api_key' AND encrypted_key IS NOT NULL) OR + (auth_type = 'oauth_codex' AND oauth_data IS NOT NULL)) +``` + +**Backward compatible:** existing records automatically receive `auth_type='api_key'`. + +**Reversible:** `alembic downgrade -1` removes the columns with no data loss. + +--- + +## New Endpoints + +| Method | Route | Description | +|--------|-------|-------------| +| POST | `/api/v1/agents/oauth/codex/device-code` | Initiate device code flow | +| POST | `/api/v1/agents/oauth/codex/device-poll` | Check if user authorized | +| GET | `/api/v1/agents/oauth/codex/status/{key_id}` | OAuth connection status | +| DELETE | `/api/v1/agents/oauth/codex/{key_id}` | Revoke OAuth connection | + +All require JWT + client ownership verification. + +--- + +## Security + +### LiteLLM Upgrade: v1.68.0 -> v1.83.3 + +The original repository uses `litellm>=1.68.0,<1.69.0` (released May 2025). This version has the following known vulnerabilities: + +#### CVE-2026-35030 — OIDC Authentication Bypass (CRITICAL) + +LiteLLM used only the **first 20 characters** of a JWT as the cache key. 
This allowed different tokens with the same first 20 characters to share the same authenticated session, enabling complete OIDC authentication bypass. + +**Fixed in:** v1.83.0 (uses full JWT hash as cache key) + +#### Supply Chain Attack — TeamPCP (March 2026) + +On March 24, 2026, LiteLLM versions **v1.82.7** and **v1.82.8** on PyPI were compromised by a threat group called TeamPCP: + +1. The group compromised **Trivy** (Aqua Security's vulnerability scanner) +2. Malicious Trivy executed in LiteLLM's CI/CD via GitHub Actions +3. Extracted PyPI publishing credentials (`PYPI_PUBLISH_PASSWORD`) via memory dump +4. Published malicious versions that: + - Collected all environment credentials (AWS, GCP, Azure, K8s, SSH, DB) + - Encrypted and exfiltrated to attacker-controlled server + - Installed persistence via systemd service + - Executed additional payloads on command + +Versions were removed from PyPI in ~40 minutes but accumulated tens of thousands of downloads. + +#### v1.83.3-stable — Secure + +Version v1.83.3 was built on the new **CI/CD v2 pipeline** with: + +| Measure | Detail | +|---------|--------| +| SHA pinning | GitHub Actions pinned by immutable commit SHA | +| Trusted Publishers (OIDC) | Short-lived tokens replace static passwords | +| Cosign signing | Docker images cryptographically signed | +| SLSA provenance | Verifiable build provenance | +| Isolated environments | Ephemeral build and publish environments | + +**Verified SHA-256 hashes:** +``` +wheel: eab4d2e1871cac0239799c33eb724d239116bf1bd275e287f92ae76ba8c7a05a +tar.gz: 38a452f708f9bb682fdfc3607aa44d68cfe936bf4a18683b0cdc5fb476424a6f +``` + +#### Compatibility: google-adk==0.3.0 + +Issue #4367 (google/adk-python) documents that LiteLLM >=1.81.3 changes the `response_schema` format for Gemini 2.0+ models. ADK 0.3.0 may have issues with structured output on those models. 
**OpenAI/ChatGPT/Anthropic models are NOT affected.** + +### OAuth Implementation Security + +| Aspect | Status | +|--------|--------| +| Tokens in logs | No tokens are logged (access, refresh, id) | +| Tokens in API responses | Never returned to frontend | +| Encryption at rest | Fernet (AES-128-CBC + HMAC-SHA256) | +| Thread-safety | SELECT FOR UPDATE with try/finally + db.rollback() | +| CSRF | JWT Bearer (stateless, CSRF-immune) | +| XSS | verificationUri validated (rejects javascript:) | +| SQL injection | SQLAlchemy ORM (parameterized queries) | +| Device code storage | Server-side only (never exposed to frontend) | + +--- + +## Implementation Files + +### New (6 files) + +| File | Service | +|------|---------| +| `src/config/oauth_constants.py` | Processor | +| `src/services/oauth_codex_service.py` | Processor | +| `migrations/versions/a1b2c3d4e5f6_add_oauth_codex_support.py` | Processor | +| `frontend/types/oauth.ts` | Frontend | +| `frontend/app/agents/dialogs/OAuthDeviceCodeFlow.tsx` | Frontend | +| `frontend/app/agents/components/OAuthStatusBadge.tsx` | Frontend | + +### Modified (9 files) + +| File | Change | +|------|--------| +| `src/models/models.py` | +auth_type, +oauth_data, encrypted_key nullable | +| `src/schemas/schemas.py` | +auth_type, key_value optional, +5 OAuth schemas | +| `src/utils/crypto.py` | +encrypt_oauth_data(), +decrypt_oauth_data() | +| `src/services/apikey_service.py` | auth_type in create, get_api_key_record() | +| `src/api/agent_routes.py` | +4 OAuth endpoints | +| `src/services/adk/agent_builder.py` | OAuth branch in _create_llm_agent() | +| `frontend/types/aiModels.ts` | +1 provider, +9 models | +| `frontend/services/agentService.ts` | +4 OAuth functions | +| `frontend/app/agents/dialogs/ApiKeysDialog.tsx` | Conditional OAuth UI | + +### Tests (22 tests) + +| Class | Tests | +|-------|-------| +| TestCryptoOAuthData | 3 — encryption round-trip | +| TestApiKeyAuthType | 6 — creation, defaults, validation | +| 
TestDeviceCodeFlow | 3 — initiate, poll pending/expired | +| TestTokenRefresh | 3 — fresh, expired, missing key | +| TestOAuthStatus | 3 — connected, disconnected, standard key | +| TestRevokeOAuth | 2 — revoke and nonexistent | +| TestModelRemapping | 3 — chatgpt/ -> openai/ | +| TestMigrationCompat | 1 — backward compatibility | + +### Nginx Gateway + +OAuth routes must be added **before** the generic `/api/v1/agents/*` route in nginx: + +```nginx +location ~ ^/api/v1/agents/oauth/ { + proxy_pass $processor_service$request_uri; +} + +location ~ ^/api/v1/agents/apikeys { + proxy_pass $processor_service$request_uri; +} +``` + +--- + +## Deployment + +### New environment variables + +```env +CODEX_ENABLED=true +CODEX_CLIENT_ID=app_EMoamEEZ73f0CkXaXp7hrann +``` + +### pyproject.toml + +```toml +# Before: +"litellm>=1.68.0,<1.69.0" + +# After: +"litellm==1.83.3" +``` + +### Migration + +Runs automatically on processor startup (`alembic upgrade head`). diff --git a/docs/OAUTH-CODEX-pt-BR.md b/docs/OAUTH-CODEX-pt-BR.md new file mode 100644 index 0000000..0023387 --- /dev/null +++ b/docs/OAUTH-CODEX-pt-BR.md @@ -0,0 +1,244 @@ +# OAuth Codex (OpenAI) — Autenticacao por Assinatura ChatGPT para Evo CRM + +## Visao Geral + +Esta implementacao adiciona **OAuth Codex da OpenAI** como metodo alternativo de autenticacao no Evo CRM, permitindo que usuarios com assinatura **ChatGPT Plus** ($20/mes) ou **ChatGPT Pro** ($200/mes) utilizem modelos GPT-5.x diretamente, sem necessidade de uma API key separada da OpenAI. + +A abordagem e **hibrida**: OAuth Codex funciona ao lado das API keys existentes. Nenhuma funcionalidade atual e alterada ou removida. + +--- + +## Arquitetura da Solucao + +### Como funciona hoje (API Keys) + +``` +Usuario cola API key (sk-...) 
no frontend + -> Backend criptografa com Fernet (AES-128-CBC) + -> Salva em api_keys.encrypted_key no PostgreSQL + -> AgentBuilder descriptografa e passa para LiteLlm(model, api_key) + -> LiteLLM roteia para o provider correto +``` + +### Como funciona com OAuth Codex (novo) + +``` +Usuario seleciona "ChatGPT (OAuth)" no frontend + -> Clica "Conectar com ChatGPT" + -> Backend inicia device code flow com auth.openai.com + -> Usuario recebe codigo (ex: ABCD-1234) + -> Usuario acessa auth.openai.com/codex/device e digita o codigo + -> Backend recebe tokens OAuth, criptografa e salva no PostgreSQL + -> AgentBuilder detecta auth_type='oauth_codex' + -> Descriptografa tokens, verifica validade, refresh automatico se expirado + -> Passa token como Bearer para chatgpt.com/backend-api/codex + -> Resposta retorna normalmente pelo pipeline existente +``` + +### Decisao Tecnica: openai/ prefix (nao chatgpt/) + +Analise do codigo-fonte do LiteLLM confirmou que o provider `chatgpt/` **ignora o parametro `api_key`** e sempre le tokens de um arquivo global `auth.json`. Isso e incompativel com multi-tenancy (cada cliente tem seu proprio token). + +A solucao usa o provider `openai/` com parametros customizados: +- `api_base` = `https://chatgpt.com/backend-api/codex` +- `api_key` = token OAuth do tenant (usado como Bearer) +- `extra_headers` = ChatGPT-Account-Id, originator + +O Google ADK `LiteLlm` passa `**kwargs` via `_additional_args` para `litellm.acompletion()`, confirmado no codigo-fonte (SHA 7d13696c). Cada tenant recebe sua propria instancia, sem estado global compartilhado. 
+ +--- + +## Modelos Disponiveis + +| Modelo | Plano Minimo | +|--------|-------------| +| chatgpt/gpt-5.4 | ChatGPT Plus | +| chatgpt/gpt-5.4-pro | ChatGPT Plus | +| chatgpt/gpt-5.3-codex | ChatGPT Plus | +| chatgpt/gpt-5.3-codex-spark | ChatGPT Pro | +| chatgpt/gpt-5.3-instant | ChatGPT Plus | +| chatgpt/gpt-5.2-codex | ChatGPT Plus | +| chatgpt/gpt-5.2 | ChatGPT Plus | +| chatgpt/gpt-5.1-codex-max | ChatGPT Pro | +| chatgpt/gpt-5.1-codex-mini | ChatGPT Plus | + +--- + +## Mudancas no Banco de Dados + +### Migration: `a1b2c3d4e5f6_add_oauth_codex_support` + +```sql +ALTER TABLE api_keys ADD COLUMN auth_type VARCHAR(20) DEFAULT 'api_key' NOT NULL; +ALTER TABLE api_keys ADD COLUMN oauth_data TEXT; +ALTER TABLE api_keys ALTER COLUMN encrypted_key DROP NOT NULL; + +-- Constraints de integridade +CHECK (auth_type IN ('api_key', 'oauth_codex')) +CHECK ((auth_type = 'api_key' AND encrypted_key IS NOT NULL) OR + (auth_type = 'oauth_codex' AND oauth_data IS NOT NULL)) +``` + +**Backward compatible:** registros existentes recebem `auth_type='api_key'` automaticamente. + +**Reversivel:** `alembic downgrade -1` remove as colunas sem perda de dados. + +--- + +## Novos Endpoints + +| Metodo | Rota | Descricao | +|--------|------|-----------| +| POST | `/api/v1/agents/oauth/codex/device-code` | Inicia device code flow | +| POST | `/api/v1/agents/oauth/codex/device-poll` | Verifica se usuario autorizou | +| GET | `/api/v1/agents/oauth/codex/status/{key_id}` | Status da conexao OAuth | +| DELETE | `/api/v1/agents/oauth/codex/{key_id}` | Revogar conexao OAuth | + +Todos requerem JWT + verificacao de ownership do client. + +--- + +## Seguranca + +### Upgrade do LiteLLM: v1.68.0 -> v1.83.3 + +O repositorio original usa `litellm>=1.68.0,<1.69.0` (lancada em Maio 2025). Esta versao possui as seguintes vulnerabilidades conhecidas: + +#### CVE-2026-35030 — Bypass de Autenticacao OIDC (CRITICO) + +O LiteLLM usava apenas os **primeiros 20 caracteres** de um JWT como chave de cache. 
Isso permitia que tokens diferentes com os mesmos 20 caracteres iniciais compartilhassem a mesma sessao autenticada, possibilitando bypass completo da autenticacao OIDC. + +**Corrigido em:** v1.83.0 (usa hash completo do JWT como chave de cache) + +#### Supply Chain Attack — TeamPCP (Marco 2026) + +Em 24 de Marco de 2026, as versoes **v1.82.7** e **v1.82.8** do LiteLLM no PyPI foram comprometidas por um grupo chamado TeamPCP: + +1. O grupo comprometeu o **Trivy** (scanner de seguranca da Aqua Security) +2. O Trivy malicioso executou no CI/CD do LiteLLM via GitHub Actions +3. Extrairam a senha de publicacao do PyPI (`PYPI_PUBLISH_PASSWORD`) via dump de memoria +4. Publicaram versoes maliciosas que: + - Coletavam todas as credenciais do ambiente (AWS, GCP, Azure, K8s, SSH, DB) + - Criptografavam e exfiltravam para servidor controlado pelo atacante + - Instalavam persistencia via systemd service + - Executavam payloads adicionais sob comando + +As versoes foram removidas do PyPI em ~40 minutos, mas acumularam dezenas de milhares de downloads. + +#### v1.83.3-stable — Segura + +A versao v1.83.3 foi construida no novo pipeline **CI/CD v2** com: + +| Medida | Detalhe | +|--------|---------| +| SHA pinning | GitHub Actions pinadas por commit SHA imutavel | +| Trusted Publishers (OIDC) | Tokens short-lived substituem senhas estaticas | +| Cosign signing | Docker images assinadas criptograficamente | +| SLSA provenance | Build provenance verificavel | +| Ambientes isolados | Build e publish em ambientes efemeros | + +**Hashes SHA-256 verificados:** +``` +wheel: eab4d2e1871cac0239799c33eb724d239116bf1bd275e287f92ae76ba8c7a05a +tar.gz: 38a452f708f9bb682fdfc3607aa44d68cfe936bf4a18683b0cdc5fb476424a6f +``` + +#### Compatibilidade: google-adk==0.3.0 + +A issue #4367 (google/adk-python) documenta que LiteLLM >=1.81.3 muda o formato de `response_schema` para modelos Gemini 2.0+. O ADK 0.3.0 pode ter problemas com structured output nesses modelos. 
**Modelos OpenAI/ChatGPT/Anthropic NAO sao afetados.** + +### Seguranca da Implementacao OAuth + +| Aspecto | Status | +|---------|--------| +| Tokens em logs | Nenhum token e logado (access, refresh, id) | +| Tokens em API responses | Nunca retornados ao frontend | +| Criptografia em repouso | Fernet (AES-128-CBC + HMAC-SHA256) | +| Thread-safety | SELECT FOR UPDATE com try/finally + db.rollback() | +| CSRF | JWT Bearer (stateless, imune a CSRF) | +| XSS | verificationUri validado (rejeita javascript:) | +| SQL injection | SQLAlchemy ORM (queries parametrizadas) | +| Device code storage | Server-side (nunca exposto ao frontend) | + +--- + +## Arquivos da Implementacao + +### Novos (6 arquivos) + +| Arquivo | Servico | +|---------|---------| +| `src/config/oauth_constants.py` | Processor | +| `src/services/oauth_codex_service.py` | Processor | +| `migrations/versions/a1b2c3d4e5f6_add_oauth_codex_support.py` | Processor | +| `frontend/types/oauth.ts` | Frontend | +| `frontend/app/agents/dialogs/OAuthDeviceCodeFlow.tsx` | Frontend | +| `frontend/app/agents/components/OAuthStatusBadge.tsx` | Frontend | + +### Modificados (9 arquivos) + +| Arquivo | Mudanca | +|---------|---------| +| `src/models/models.py` | +auth_type, +oauth_data, encrypted_key nullable | +| `src/schemas/schemas.py` | +auth_type, key_value opcional, +5 OAuth schemas | +| `src/utils/crypto.py` | +encrypt_oauth_data(), +decrypt_oauth_data() | +| `src/services/apikey_service.py` | auth_type em create, get_api_key_record() | +| `src/api/agent_routes.py` | +4 endpoints OAuth | +| `src/services/adk/agent_builder.py` | Branch OAuth em _create_llm_agent() | +| `frontend/types/aiModels.ts` | +1 provider, +9 modelos | +| `frontend/services/agentService.ts` | +4 funcoes OAuth | +| `frontend/app/agents/dialogs/ApiKeysDialog.tsx` | UI condicional OAuth | + +### Testes (22 testes) + +| Classe | Testes | +|--------|--------| +| TestCryptoOAuthData | 3 — round-trip criptografia | +| TestApiKeyAuthType | 6 — 
criacao, defaults, validacao | +| TestDeviceCodeFlow | 3 — initiate, poll pending/expired | +| TestTokenRefresh | 3 — fresh, expired, missing key | +| TestOAuthStatus | 3 — connected, disconnected, standard key | +| TestRevokeOAuth | 2 — revoke e nonexistent | +| TestModelRemapping | 3 — chatgpt/ -> openai/ | +| TestMigrationCompat | 1 — backward compatibility | + +### Nginx Gateway + +As rotas OAuth precisam ser adicionadas **antes** da rota generica `/api/v1/agents/*` no nginx: + +```nginx +location ~ ^/api/v1/agents/oauth/ { + proxy_pass $processor_service$request_uri; + # ... headers ... +} + +location ~ ^/api/v1/agents/apikeys { + proxy_pass $processor_service$request_uri; + # ... headers ... +} +``` + +--- + +## Deploy + +### Variaveis de ambiente (novas) + +```env +CODEX_ENABLED=true +CODEX_CLIENT_ID=app_EMoamEEZ73f0CkXaXp7hrann +``` + +### pyproject.toml + +```toml +# Antes: +"litellm>=1.68.0,<1.69.0" + +# Depois: +"litellm==1.83.3" +``` + +### Migration + +Executa automaticamente no startup do processor (`alembic upgrade head`). 
diff --git a/evo-ai-crm-community b/evo-ai-crm-community index be18d1b..43608c6 160000 --- a/evo-ai-crm-community +++ b/evo-ai-crm-community @@ -1 +1 @@ -Subproject commit be18d1b8ca58efb2d04de3a95c109e879301a6c0 +Subproject commit 43608c614982b8528544fd684d3dd6475c3847e3 diff --git a/evo-ai-frontend-community b/evo-ai-frontend-community index 4485997..c70ad83 160000 --- a/evo-ai-frontend-community +++ b/evo-ai-frontend-community @@ -1 +1 @@ -Subproject commit 4485997fedab18df01ca906b20663f6388ddfebc +Subproject commit c70ad8366071ac9d0ec17a02ebe3e277150f34e8 diff --git a/evo-ai-processor-community b/evo-ai-processor-community index 9050693..a6fc98d 160000 --- a/evo-ai-processor-community +++ b/evo-ai-processor-community @@ -1 +1 @@ -Subproject commit 905069352825ba0f41b8aa66ebbf9c245ef1762a +Subproject commit a6fc98dca3b83946e4ebc8ead205a07679304a43 diff --git a/implementation/frontend/app/agents/components/OAuthStatusBadge.tsx b/implementation/frontend/app/agents/components/OAuthStatusBadge.tsx new file mode 100644 index 0000000..327e3a5 --- /dev/null +++ b/implementation/frontend/app/agents/components/OAuthStatusBadge.tsx @@ -0,0 +1,100 @@ +/** + * OAuth Status Badge component. + * + * New file — add to: evo-ai-frontend-community/app/agents/components/OAuthStatusBadge.tsx + * + * Compact badge showing OAuth connection status for API key list items. + * Green = connected, Red = disconnected/expired. 
+ */ +"use client"; + +import { useState, useEffect } from "react"; +import { RefreshCw } from "lucide-react"; +import { getOAuthStatus } from "@/services/agentService"; + +interface OAuthStatusBadgeProps { + keyId: string; + clientId: string; +} + +export function OAuthStatusBadge({ keyId, clientId }: OAuthStatusBadgeProps) { + const [connected, setConnected] = useState(null); + const [planType, setPlanType] = useState(null); + const [expiresAt, setExpiresAt] = useState(null); + const [loading, setLoading] = useState(true); + + const fetchStatus = async () => { + setLoading(true); + try { + const res = await getOAuthStatus(keyId, clientId); + const data = res.data; + setConnected(data.connected); + setPlanType(data.plan_type || null); + setExpiresAt(data.expires_at || null); + } catch { + setConnected(false); + } finally { + setLoading(false); + } + }; + + useEffect(() => { + fetchStatus(); + }, [keyId, clientId]); + + const getExpiryLabel = () => { + if (!expiresAt) return null; + const exp = new Date(expiresAt); + const now = new Date(); + const hoursLeft = Math.floor((exp.getTime() - now.getTime()) / (1000 * 60 * 60)); + if (hoursLeft < 0) return "Expired"; + if (hoursLeft < 24) return `${hoursLeft}h left`; + return null; + }; + + if (loading) { + return ( + + + Checking... + + ); + } + + if (connected) { + const expiryLabel = getExpiryLabel(); + return ( + + + + Connected + + {planType && ( + + {planType === "plus" ? "Plus" : planType === "pro" ? "Pro" : planType} + + )} + {expiryLabel && ( + {expiryLabel} + )} + + + ); + } + + return ( + + + Disconnected + + ); +} diff --git a/implementation/frontend/app/agents/dialogs/ApiKeysDialog_diff.tsx b/implementation/frontend/app/agents/dialogs/ApiKeysDialog_diff.tsx new file mode 100644 index 0000000..f6a9a1c --- /dev/null +++ b/implementation/frontend/app/agents/dialogs/ApiKeysDialog_diff.tsx @@ -0,0 +1,151 @@ +/** + * ApiKeysDialog changes for OAuth Codex support. 
+ * + * Apply to: evo-ai-frontend-community/app/agents/dialogs/ApiKeysDialog.tsx + * + * 3 changes: + * 1. Add imports for OAuthDeviceCodeFlow and OAuthStatusBadge + * 2. Add state for OAuth flow + * 3. Conditional rendering: when provider === "openai-codex", + * hide key_value field, show OAuth connect button + */ + + +// =========================================================================== +// CHANGE 1: Add imports at top of file +// =========================================================================== + +// ADD: +// import { OAuthDeviceCodeFlow } from "./OAuthDeviceCodeFlow"; +// import { OAuthStatusBadge } from "../components/OAuthStatusBadge"; + + +// =========================================================================== +// CHANGE 2: Add state variable inside the component +// =========================================================================== + +// ADD after existing state declarations: +// const [showOAuthFlow, setShowOAuthFlow] = useState(false); + + +// =========================================================================== +// CHANGE 3: Modify the form section (inside the add/edit form) +// =========================================================================== + +// In the form where provider select and key_value input are rendered, +// WRAP the key_value input in a conditional: +// +// BEFORE: +// +// +// +// AFTER: +// {currentApiKey.provider === "openai-codex" ? ( +// /* OAuth flow — no key_value needed */ +// showOAuthFlow ? 
( +// { +// setShowOAuthFlow(false); +// setIsAddingApiKey(false); +// // Reload API keys list +// onOpenChange(true); /* or call loadApiKeys() */ +// }} +// onCancel={() => setShowOAuthFlow(false)} +// /> +// ) : ( +// +// ) +// ) : ( +// /* Standard API key input — UNCHANGED */ +// <> +// +// +// +// )} + + +// =========================================================================== +// CHANGE 4: Modify the key list item rendering +// =========================================================================== + +// In the list where each API key is displayed, add OAuthStatusBadge +// for OAuth keys: +// +// BEFORE (for each key in the list): +// {key.name} +// Key: ******** +// +// AFTER: +// {key.name} +// {key.auth_type === "oauth_codex" ? ( +// +// ) : ( +// Key: ******** +// )} + + +// =========================================================================== +// CHANGE 5: Hide edit key_value for OAuth keys +// =========================================================================== + +// In the edit form, when editing an OAuth key: +// - Do NOT show the key_value password field +// - Instead show "Reconnect" button that triggers OAuthDeviceCodeFlow +// - Name and is_active can still be edited normally + + +// =========================================================================== +// CHANGE 6: Add ExternalLink icon import +// =========================================================================== + +// ADD to lucide-react imports: +// import { ..., ExternalLink } from "lucide-react"; + + +// =========================================================================== +// FULL FLOW SUMMARY +// =========================================================================== +// +// Adding a standard API key (UNCHANGED): +// 1. User selects provider "OpenAI" from dropdown +// 2. User enters name + key value +// 3. Click "Add" -> POST /apikeys with auth_type="api_key" +// +// Adding an OAuth Codex key (NEW): +// 1. 
User selects provider "ChatGPT (OAuth)" from dropdown +// 2. Key value field is HIDDEN +// 3. User enters a name (e.g., "My ChatGPT") +// 4. Clicks "Connect with ChatGPT" button +// 5. OAuthDeviceCodeFlow component appears: +// - Shows user_code in large monospace font +// - Shows link to auth.openai.com/codex/device +// - Polls backend every 5s for authorization +// 6. User opens link, enters code, authorizes on ChatGPT +// 7. Backend receives tokens, encrypts, stores in DB +// 8. OAuthDeviceCodeFlow shows "Connected!" green checkmark +// 9. Dialog reloads key list +// 10. Key appears in list with OAuthStatusBadge (green "Connected") +// +// Viewing OAuth keys in list: +// - Shows OAuthStatusBadge instead of "Key: ********" +// - Badge shows Connected/Disconnected + plan type (Plus/Pro) +// - Refresh button to re-check status +// +// Editing OAuth keys: +// - Can edit name and is_active +// - Cannot edit key_value (doesn't exist) +// - "Reconnect" button available if token expired diff --git a/implementation/frontend/app/agents/dialogs/OAuthDeviceCodeFlow.tsx b/implementation/frontend/app/agents/dialogs/OAuthDeviceCodeFlow.tsx new file mode 100644 index 0000000..22f3bec --- /dev/null +++ b/implementation/frontend/app/agents/dialogs/OAuthDeviceCodeFlow.tsx @@ -0,0 +1,250 @@ +/** + * OAuth Device Code Flow component. + * + * New file — add to: evo-ai-frontend-community/app/agents/dialogs/OAuthDeviceCodeFlow.tsx + * + * Manages the full device code lifecycle: + * 1. Calls initiateOAuthDeviceCode to get user_code + * 2. Displays code + verification URL for user + * 3. Polls backend at interval for authorization + * 4. 
Shows success/error/expired states + */ +"use client"; + +import { useState, useEffect, useRef, useCallback } from "react"; +import { Button } from "@/components/ui/button"; +import { Copy, CheckCircle, XCircle, Loader2, ExternalLink } from "lucide-react"; +import { initiateOAuthDeviceCode, pollOAuthDeviceCode } from "@/services/agentService"; + +interface OAuthDeviceCodeFlowProps { + clientId: string; + name: string; + onSuccess: (keyId: string) => void; + onCancel: () => void; +} + +type FlowState = "loading" | "waiting" | "complete" | "expired" | "error"; + +export function OAuthDeviceCodeFlow({ + clientId, + name, + onSuccess, + onCancel, +}: OAuthDeviceCodeFlowProps) { + const [state, setState] = useState("loading"); + const [userCode, setUserCode] = useState(""); + const [verificationUri, setVerificationUri] = useState(""); + const [keyId, setKeyId] = useState(""); + const [errorMessage, setErrorMessage] = useState(""); + const [copied, setCopied] = useState(false); + const [secondsLeft, setSecondsLeft] = useState(0); + + const pollIntervalRef = useRef | null>(null); + const countdownRef = useRef | null>(null); + + const cleanup = useCallback(() => { + if (pollIntervalRef.current) { + clearInterval(pollIntervalRef.current); + pollIntervalRef.current = null; + } + if (countdownRef.current) { + clearInterval(countdownRef.current); + countdownRef.current = null; + } + }, []); + + // Initiate device code flow on mount + useEffect(() => { + let cancelled = false; + + async function initiate() { + try { + const res = await initiateOAuthDeviceCode(clientId, name); + if (cancelled) return; + + const data = res.data; + setUserCode(data.user_code); + setVerificationUri(data.verification_uri); + setKeyId(data.key_id); + setSecondsLeft(data.expires_in); + setState("waiting"); + + // Start polling + const interval = Math.max(data.interval, 5) * 1000; + pollIntervalRef.current = setInterval(async () => { + try { + const pollRes = await pollOAuthDeviceCode(data.key_id); + 
const pollData = pollRes.data; + + if (pollData.status === "complete") { + cleanup(); + setState("complete"); + onSuccess(data.key_id); + } else if (pollData.status === "expired") { + cleanup(); + setState("expired"); + setErrorMessage("Device code expired. Please try again."); + } else if (pollData.status === "error") { + cleanup(); + setState("error"); + setErrorMessage(pollData.message || "Authentication failed."); + } + // "pending" -> keep polling + } catch { + // Network error during poll — keep trying + } + }, interval); + + // Start countdown timer + countdownRef.current = setInterval(() => { + setSecondsLeft((prev) => { + if (prev <= 1) { + cleanup(); + setState("expired"); + setErrorMessage("Device code expired. Please try again."); + return 0; + } + return prev - 1; + }); + }, 1000); + } catch (err: any) { + if (cancelled) return; + setState("error"); + setErrorMessage(err?.response?.data?.detail || "Failed to start OAuth flow."); + } + } + + initiate(); + + return () => { + cancelled = true; + cleanup(); + }; + }, [clientId, name, cleanup, onSuccess]); + + const handleCopy = async () => { + await navigator.clipboard.writeText(userCode); + setCopied(true); + setTimeout(() => setCopied(false), 2000); + }; + + const formatTime = (seconds: number) => { + const m = Math.floor(seconds / 60); + const s = seconds % 60; + return `${m}:${s.toString().padStart(2, "0")}`; + }; + + // --- Loading state --- + if (state === "loading") { + return ( +
+ +

Initiating OAuth connection...

+
+ ); + } + + // --- Success state --- + if (state === "complete") { + return ( +
+ +

Connected!

+

+ Your ChatGPT subscription is now linked. +

+
+ ); + } + + // --- Error state --- + if (state === "error") { + return ( +
+ +

{errorMessage}

+ +
+ ); + } + + // --- Expired state --- + if (state === "expired") { + return ( +
+ +

{errorMessage}

+ +
+ ); + } + + // --- Waiting state (main UI) --- + return ( +
+

+ Visit the link below and enter this code to connect your ChatGPT subscription: +

+ + {/* User code display */} +
+ + {userCode} + + +
+ + {/* Verification link */} + + + Open {verificationUri} + + + {/* Polling indicator */} +
+ + Waiting for authorization... ({formatTime(secondsLeft)}) +
+ + {/* Cancel button */} + +
+ ); +} diff --git a/implementation/frontend/services/agentService_diff.ts b/implementation/frontend/services/agentService_diff.ts new file mode 100644 index 0000000..efca313 --- /dev/null +++ b/implementation/frontend/services/agentService_diff.ts @@ -0,0 +1,97 @@ +/** + * Agent service changes for OAuth Codex support. + * + * Apply to: evo-ai-frontend-community/services/agentService.ts + * + * 2 changes: + * 1. Add OAuth type imports + * 2. Add 4 new OAuth API functions at the end of the file + * + * All existing functions remain UNCHANGED. + * The existing ApiKey interface needs auth_type added. + */ + + +// =========================================================================== +// CHANGE 1: Add import at top of file +// =========================================================================== + +// ADD: +// import type { +// OAuthDeviceCodeResponse, +// OAuthPollResponse, +// OAuthStatusResponse, +// } from "@/types/oauth"; + + +// =========================================================================== +// CHANGE 2: Update ApiKey interface +// =========================================================================== + +// BEFORE: +// export interface ApiKey { +// id: string; +// name: string; +// provider: string; +// client_id: string; +// created_at: string; +// updated_at: string; +// is_active: boolean; +// } + +// AFTER: +// export interface ApiKey { +// id: string; +// name: string; +// provider: string; +// client_id: string; +// created_at: string; +// updated_at: string; +// is_active: boolean; +// auth_type: string; // <-- NEW: "api_key" or "oauth_codex" +// oauth_connected?: boolean; // <-- NEW: computed field from backend +// } + + +// =========================================================================== +// CHANGE 3: Add 4 OAuth functions at end of file +// =========================================================================== + +// ADD: +// +// // --- OAuth Codex API functions --- +// +// export const 
initiateOAuthDeviceCode = (clientId: string, name: string) => +// api.post("/api/v1/agents/oauth/codex/device-code", { +// client_id: clientId, +// name, +// }); +// +// export const pollOAuthDeviceCode = (keyId: string) => +// api.post("/api/v1/agents/oauth/codex/device-poll", { +// key_id: keyId, +// }); +// +// export const getOAuthStatus = (keyId: string, clientId: string) => +// api.get( +// `/api/v1/agents/oauth/codex/status/${keyId}`, +// { headers: { "x-client-id": clientId } } +// ); +// +// export const revokeOAuth = (keyId: string, clientId: string) => +// api.delete( +// `/api/v1/agents/oauth/codex/${keyId}`, +// { headers: { "x-client-id": clientId } } +// ); + + +// =========================================================================== +// IMPORTANT: API base URL +// =========================================================================== +// +// These OAuth endpoints are on the PROCESSOR service (port 8000), +// which is the same service that handles /api/v1/agents/apikeys. +// The existing `api` axios instance already points to the correct base URL +// (VITE_AGENT_PROCESSOR_URL or VITE_EVOAI_API_URL). +// +// Verify in frontend/services/api.ts that the base URL is correct. diff --git a/implementation/frontend/types/aiModels_diff.ts b/implementation/frontend/types/aiModels_diff.ts new file mode 100644 index 0000000..2a7d648 --- /dev/null +++ b/implementation/frontend/types/aiModels_diff.ts @@ -0,0 +1,59 @@ +/** + * AI Models changes for OAuth Codex support. + * + * Apply to: evo-ai-frontend-community/types/aiModels.ts + * + * 2 changes: + * 1. Add "openai-codex" to availableModelProviders + * 2. 
Add chatgpt/ models to availableModels + */ + + +// =========================================================================== +// CHANGE 1: Add to availableModelProviders array +// Insert after the "openai" entry +// =========================================================================== + +// BEFORE: +// export const availableModelProviders = [ +// { value: "openai", label: "OpenAI" }, +// { value: "gemini", label: "Gemini" }, +// ... + +// AFTER: +// export const availableModelProviders = [ +// { value: "openai", label: "OpenAI" }, +// { value: "openai-codex", label: "ChatGPT (OAuth)" }, // <-- NEW +// { value: "gemini", label: "Gemini" }, +// ... + + +// =========================================================================== +// CHANGE 2: Add to availableModels array +// Insert at the end of the array, after the last cohere model +// =========================================================================== + +// ADD these entries: +// +// // ChatGPT OAuth Codex models (subscription-based, no API key needed) +// { value: "chatgpt/gpt-5.4", label: "GPT-5.4", provider: "openai-codex" }, +// { value: "chatgpt/gpt-5.4-pro", label: "GPT-5.4 Pro", provider: "openai-codex" }, +// { value: "chatgpt/gpt-5.3-codex", label: "GPT-5.3 Codex", provider: "openai-codex" }, +// { value: "chatgpt/gpt-5.3-codex-spark", label: "GPT-5.3 Codex Spark", provider: "openai-codex" }, +// { value: "chatgpt/gpt-5.3-instant", label: "GPT-5.3 Instant", provider: "openai-codex" }, +// { value: "chatgpt/gpt-5.3-chat-latest", label: "GPT-5.3 Chat Latest", provider: "openai-codex" }, +// { value: "chatgpt/gpt-5.2-codex", label: "GPT-5.2 Codex", provider: "openai-codex" }, +// { value: "chatgpt/gpt-5.2", label: "GPT-5.2", provider: "openai-codex" }, +// { value: "chatgpt/gpt-5.1-codex-max", label: "GPT-5.1 Codex Max", provider: "openai-codex" }, +// { value: "chatgpt/gpt-5.1-codex-mini", label: "GPT-5.1 Codex Mini", provider: "openai-codex" }, + + +// 
=========================================================================== +// NOTE: No changes needed to LLMAgentConfig.tsx +// =========================================================================== +// +// The model filtering in LLMAgentConfig.tsx already works: +// model.provider === selectedKey.provider +// +// When user selects a key with provider='openai-codex', only +// chatgpt/* models will appear in the dropdown. Zero code changes needed. diff --git a/implementation/frontend/types/oauth.ts b/implementation/frontend/types/oauth.ts new file mode 100644 index 0000000..d631331 --- /dev/null +++ b/implementation/frontend/types/oauth.ts @@ -0,0 +1,27 @@ +/** + * OAuth Codex (OpenAI) TypeScript types. + * + * New file — add to: evo-ai-frontend-community/types/oauth.ts + */ + +export interface OAuthDeviceCodeResponse { + user_code: string; + verification_uri: string; + expires_in: number; + interval: number; + key_id: string; +} + +export interface OAuthPollResponse { + status: "pending" | "complete" | "expired" | "error"; + key_id?: string; + message?: string; +} + +export interface OAuthStatusResponse { + key_id: string; + connected: boolean; + expires_at?: string; + account_id?: string; + plan_type?: string; +} diff --git a/implementation/processor/migrations/versions/a1b2c3d4e5f6_add_oauth_codex_support.py b/implementation/processor/migrations/versions/a1b2c3d4e5f6_add_oauth_codex_support.py new file mode 100644 index 0000000..885826a --- /dev/null +++ b/implementation/processor/migrations/versions/a1b2c3d4e5f6_add_oauth_codex_support.py @@ -0,0 +1,53 @@ +"""add_oauth_codex_support + +Revision ID: a1b2c3d4e5f6 +Revises: 2df073c7b564 +Create Date: 2026-04-16 00:00:00.000000 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "a1b2c3d4e5f6" +down_revision: Union[str, None] = "2df073c7b564" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add OAuth Codex support to api_keys table.""" + op.add_column( + "api_keys", + sa.Column("auth_type", sa.String(20), server_default="api_key", nullable=False), + ) + op.add_column( + "api_keys", + sa.Column("oauth_data", sa.Text(), nullable=True), + ) + op.alter_column( + "api_keys", "encrypted_key", existing_type=sa.String(), nullable=True, + ) + op.create_check_constraint( + "chk_auth_type", "api_keys", "auth_type IN ('api_key', 'oauth_codex')", + ) + op.create_check_constraint( + "chk_auth_data", "api_keys", + "(auth_type = 'api_key' AND encrypted_key IS NOT NULL) OR " + "(auth_type = 'oauth_codex' AND oauth_data IS NOT NULL)", + ) + + +def downgrade() -> None: + """Remove OAuth Codex support from api_keys table.""" + op.drop_constraint("chk_auth_data", "api_keys", type_="check") + op.drop_constraint("chk_auth_type", "api_keys", type_="check") + op.alter_column( + "api_keys", "encrypted_key", existing_type=sa.String(), nullable=False, + ) + op.drop_column("api_keys", "oauth_data") + op.drop_column("api_keys", "auth_type") diff --git a/implementation/processor/src/api/agent_routes_diff.py b/implementation/processor/src/api/agent_routes_diff.py new file mode 100644 index 0000000..bd44893 --- /dev/null +++ b/implementation/processor/src/api/agent_routes_diff.py @@ -0,0 +1,197 @@ +""" +Agent routes changes for OAuth Codex support. + +Apply to: evo-ai-processor-community/src/api/agent_routes.py + +2 changes: +1. Add OAuth schema imports to the existing import block +2. Modify create_api_key route to pass auth_type +3. Add 4 new OAuth endpoints (after existing apikey routes, before folder routes) + +All existing routes remain UNCHANGED. 
+""" + + +# =========================================================================== +# CHANGE 1: Add imports (line 49-58) +# =========================================================================== + +# ADD to the existing import from src.schemas.schemas: +# from src.schemas.schemas import ( +# Agent, +# AgentCreate, +# AgentFolder, +# AgentFolderCreate, +# AgentFolderUpdate, +# ApiKey, +# ApiKeyCreate, +# ApiKeyUpdate, +# OAuthDeviceCodeRequest, # <-- NEW +# OAuthDeviceCodeResponse, # <-- NEW +# OAuthDevicePollRequest, # <-- NEW +# OAuthDevicePollResponse, # <-- NEW +# OAuthStatusResponse, # <-- NEW +# ) + +# ADD new import: +# from src.services.oauth_codex_service import ( +# initiate_device_code_flow, +# poll_device_code, +# get_oauth_status, +# revoke_oauth, +# ) + + +# =========================================================================== +# CHANGE 2: Modify create_api_key route (line 111-124) +# =========================================================================== + +# BEFORE: +# @router.post("/apikeys", response_model=ApiKey, status_code=status.HTTP_201_CREATED) +# async def create_api_key( +# key: ApiKeyCreate, +# db: Session = Depends(get_db), +# payload: dict = Depends(get_jwt_token), +# ): +# """Create a new API key""" +# await verify_user_client(payload, db, key.client_id) +# db_key = apikey_service.create_api_key( +# db, key.client_id, key.name, key.provider, key.key_value +# ) +# return db_key + +# AFTER: +# @router.post("/apikeys", response_model=ApiKey, status_code=status.HTTP_201_CREATED) +# async def create_api_key( +# key: ApiKeyCreate, +# db: Session = Depends(get_db), +# payload: dict = Depends(get_jwt_token), +# ): +# """Create a new API key""" +# await verify_user_client(payload, db, key.client_id) +# db_key = apikey_service.create_api_key( +# db, key.client_id, key.name, key.provider, +# key.key_value, key.auth_type, # <-- ADDED auth_type +# ) +# return db_key + + +# 
=========================================================================== +# CHANGE 3: Add 4 OAuth endpoints +# Insert AFTER delete_api_key route (line ~244) and BEFORE folder routes (line ~247) +# =========================================================================== + +# --- OAuth Codex Device Code Flow --- +# +# @router.post( +# "/oauth/codex/device-code", +# response_model=OAuthDeviceCodeResponse, +# status_code=status.HTTP_200_OK, +# ) +# async def oauth_codex_device_code( +# req: OAuthDeviceCodeRequest, +# db: Session = Depends(get_db), +# payload: dict = Depends(get_jwt_token), +# ): +# """Initiate OAuth Codex device code flow. +# +# Returns a user_code and verification_uri. The user must visit the URI +# and enter the code to authorize their ChatGPT subscription. +# """ +# await verify_user_client(payload, db, req.client_id) +# return initiate_device_code_flow(db, req.client_id, req.name) +# +# +# @router.post( +# "/oauth/codex/device-poll", +# response_model=OAuthDevicePollResponse, +# status_code=status.HTTP_200_OK, +# ) +# async def oauth_codex_device_poll( +# req: OAuthDevicePollRequest, +# db: Session = Depends(get_db), +# payload: dict = Depends(get_jwt_token), +# ): +# """Poll for device code authorization status. +# +# Call this at the interval specified by device-code response. +# Returns 'pending', 'complete', 'expired', or 'error'. 
+# """ +# key = apikey_service.get_api_key(db, req.key_id) +# if not key: +# raise HTTPException( +# status_code=status.HTTP_404_NOT_FOUND, detail="API Key not found" +# ) +# await verify_user_client(payload, db, key.client_id) +# return poll_device_code(db, req.key_id) +# +# +# @router.get( +# "/oauth/codex/status/{key_id}", +# response_model=OAuthStatusResponse, +# status_code=status.HTTP_200_OK, +# ) +# async def oauth_codex_status( +# key_id: uuid.UUID, +# x_client_id: uuid.UUID = Header(..., alias="x-client-id"), +# db: Session = Depends(get_db), +# payload: dict = Depends(get_jwt_token), +# ): +# """Get OAuth Codex connection status for a key.""" +# await verify_user_client(payload, db, x_client_id) +# key = apikey_service.get_api_key(db, key_id) +# if not key: +# raise HTTPException( +# status_code=status.HTTP_404_NOT_FOUND, detail="API Key not found" +# ) +# if key.client_id != x_client_id: +# raise HTTPException( +# status_code=status.HTTP_403_FORBIDDEN, +# detail="API Key does not belong to the specified client", +# ) +# return get_oauth_status(db, key_id) +# +# +# @router.delete( +# "/oauth/codex/{key_id}", +# status_code=status.HTTP_204_NO_CONTENT, +# ) +# async def oauth_codex_revoke( +# key_id: uuid.UUID, +# x_client_id: uuid.UUID = Header(..., alias="x-client-id"), +# db: Session = Depends(get_db), +# payload: dict = Depends(get_jwt_token), +# ): +# """Revoke OAuth Codex connection (deactivate key and clear tokens).""" +# await verify_user_client(payload, db, x_client_id) +# key = apikey_service.get_api_key(db, key_id) +# if not key: +# raise HTTPException( +# status_code=status.HTTP_404_NOT_FOUND, detail="API Key not found" +# ) +# if key.client_id != x_client_id: +# raise HTTPException( +# status_code=status.HTTP_403_FORBIDDEN, +# detail="API Key does not belong to the specified client", +# ) +# revoke_oauth(db, key_id) + + +# =========================================================================== +# SUMMARY OF ENDPOINTS +# 
=========================================================================== +# +# Existing (UNCHANGED): +# POST /api/v1/agents/apikeys - Create API key +# GET /api/v1/agents/apikeys - List API keys +# GET /api/v1/agents/apikeys/{key_id} - Get API key +# PUT /api/v1/agents/apikeys/{key_id} - Update API key +# DELETE /api/v1/agents/apikeys/{key_id} - Delete API key +# +# New OAuth (added): +# POST /api/v1/agents/oauth/codex/device-code - Start device code flow +# POST /api/v1/agents/oauth/codex/device-poll - Poll for authorization +# GET /api/v1/agents/oauth/codex/status/{key_id} - Check connection status +# DELETE /api/v1/agents/oauth/codex/{key_id} - Revoke OAuth connection +# +# All endpoints require JWT + verify_user_client for ownership verification. diff --git a/implementation/processor/src/config/oauth_constants.py b/implementation/processor/src/config/oauth_constants.py new file mode 100644 index 0000000..ccc5328 --- /dev/null +++ b/implementation/processor/src/config/oauth_constants.py @@ -0,0 +1,38 @@ +""" +OAuth Codex (OpenAI) constants and configuration. 
+ +New file — add to: evo-ai-processor-community/src/config/oauth_constants.py +""" + +import os + + +# OpenAI Codex OAuth client (public, used by Codex CLI and all OAuth tools) +CODEX_CLIENT_ID = os.getenv("CODEX_CLIENT_ID", "app_EMoamEEZ73f0CkXaXp7hrann") + +# Auth endpoints +CODEX_AUTH_BASE = "https://auth.openai.com" +CODEX_TOKEN_URL = f"{CODEX_AUTH_BASE}/oauth/token" +CODEX_DEVICE_CODE_URL = f"{CODEX_AUTH_BASE}/api/accounts/deviceauth/usercode" +CODEX_DEVICE_POLL_URL = f"{CODEX_AUTH_BASE}/api/accounts/deviceauth/token" +CODEX_DEVICE_VERIFY_URL = f"{CODEX_AUTH_BASE}/codex/device" + +# API endpoint for ChatGPT subscription models +CODEX_API_BASE = "https://chatgpt.com/backend-api/codex" + +# OAuth scopes +CODEX_SCOPES = "openid profile email offline_access" + +# Token refresh buffer (refresh if expiring within this many seconds) +CODEX_TOKEN_REFRESH_BUFFER_SECONDS = 60 + +# Device code polling timeout (15 minutes, per OpenAI spec) +CODEX_DEVICE_CODE_TIMEOUT_SECONDS = 900 + +# Required headers for Codex API calls +CODEX_ORIGINATOR = "codex_cli_rs" +CODEX_USER_AGENT = "codex_cli_rs/0.38.0" + +# JWT claim path for extracting ChatGPT account ID from id_token +CODEX_JWT_AUTH_CLAIM = "https://api.openai.com/auth" +CODEX_JWT_ACCOUNT_ID_KEY = "chatgpt_account_id" diff --git a/implementation/processor/src/models/models_diff.py b/implementation/processor/src/models/models_diff.py new file mode 100644 index 0000000..fc4e69a --- /dev/null +++ b/implementation/processor/src/models/models_diff.py @@ -0,0 +1,43 @@ +""" +ApiKey model changes for OAuth Codex support. 
+ +Apply these changes to: evo-ai-processor-community/src/models/models.py + +DIFF — Replace the ApiKey class with: +""" + + +# --- BEFORE (original) --- +# class ApiKey(Base): +# __tablename__ = "api_keys" +# id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) +# client_id = Column(UUID(as_uuid=True), ForeignKey("clients.id", ondelete="CASCADE")) +# name = Column(String, nullable=False) +# provider = Column(String, nullable=False) +# encrypted_key = Column(String, nullable=False) <-- WAS NOT NULL +# created_at = Column(DateTime(timezone=True), server_default=func.now()) +# updated_at = Column(DateTime(timezone=True), onupdate=func.now()) +# is_active = Column(Boolean, default=True) +# client = relationship("Client", backref="api_keys") + + +# --- AFTER (with OAuth Codex) --- +# class ApiKey(Base): +# __tablename__ = "api_keys" +# id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) +# client_id = Column(UUID(as_uuid=True), ForeignKey("clients.id", ondelete="CASCADE")) +# name = Column(String, nullable=False) +# provider = Column(String, nullable=False) +# encrypted_key = Column(String, nullable=True) <-- NOW NULLABLE +# auth_type = Column(String(20), nullable=False, server_default="api_key") <-- NEW +# oauth_data = Column(Text, nullable=True) <-- NEW +# created_at = Column(DateTime(timezone=True), server_default=func.now()) +# updated_at = Column(DateTime(timezone=True), onupdate=func.now()) +# is_active = Column(Boolean, default=True) +# client = relationship("Client", backref="api_keys") + + +# Changes summary: +# 1. encrypted_key: nullable=False -> nullable=True +# 2. NEW: auth_type = Column(String(20), nullable=False, server_default="api_key") +# 3. 
NEW: oauth_data = Column(Text, nullable=True) diff --git a/implementation/processor/src/schemas/schemas_diff.py b/implementation/processor/src/schemas/schemas_diff.py new file mode 100644 index 0000000..66ca289 --- /dev/null +++ b/implementation/processor/src/schemas/schemas_diff.py @@ -0,0 +1,77 @@ +""" +Schema changes for OAuth Codex support. + +Apply these changes to: evo-ai-processor-community/src/schemas/schemas.py + +All Agent/Client/MCP/Tool/Folder schemas remain UNCHANGED. +Only ApiKey schemas are modified + new OAuth schemas added. +""" + + +# --- MODIFIED: ApiKeyBase --- +# BEFORE: +# class ApiKeyBase(BaseModel): +# name: str +# provider: str +# +# AFTER: +# class ApiKeyBase(BaseModel): +# name: str +# provider: str +# auth_type: str = "api_key" <-- NEW FIELD + + +# --- MODIFIED: ApiKeyCreate --- +# BEFORE: +# class ApiKeyCreate(ApiKeyBase): +# client_id: UUID4 +# key_value: str <-- WAS REQUIRED +# +# AFTER: +# class ApiKeyCreate(ApiKeyBase): +# client_id: UUID4 +# key_value: Optional[str] = None <-- NOW OPTIONAL +# +# @validator("key_value") +# def validate_key_value(cls, v, values): +# auth_type = values.get("auth_type", "api_key") +# if auth_type == "api_key" and not v: +# raise ValueError("key_value is required for api_key auth type") +# return v + + +# --- MODIFIED: ApiKeyUpdate --- +# ADDED: auth_type: Optional[str] = None + + +# --- MODIFIED: ApiKey (response) --- +# ADDED: oauth_connected: Optional[bool] = None + + +# --- NEW SCHEMAS --- + +# class OAuthDeviceCodeRequest(BaseModel): +# client_id: UUID4 +# name: str +# +# class OAuthDeviceCodeResponse(BaseModel): +# user_code: str +# verification_uri: str +# expires_in: int +# interval: int +# key_id: UUID4 +# +# class OAuthDevicePollRequest(BaseModel): +# key_id: UUID4 +# +# class OAuthDevicePollResponse(BaseModel): +# status: str # "pending", "complete", "expired", "error" +# key_id: Optional[UUID4] = None +# message: Optional[str] = None +# +# class OAuthStatusResponse(BaseModel): +# key_id: 
UUID4 +# connected: bool +# expires_at: Optional[datetime] = None +# account_id: Optional[str] = None +# plan_type: Optional[str] = None diff --git a/implementation/processor/src/services/adk/agent_builder_diff.py b/implementation/processor/src/services/adk/agent_builder_diff.py new file mode 100644 index 0000000..d4cd369 --- /dev/null +++ b/implementation/processor/src/services/adk/agent_builder_diff.py @@ -0,0 +1,204 @@ +""" +AgentBuilder modification for OAuth Codex support. + +Apply to: evo-ai-processor-community/src/services/adk/agent_builder.py + +This is THE critical injection point. Only the _create_llm_agent method +is modified. All other methods remain UNCHANGED. + +2 changes: +1. Add imports at top of file +2. Replace the API key resolution block (lines 135-182) in _create_llm_agent() +""" + + +# =========================================================================== +# CHANGE 1: Add imports (after line 44, before line 45) +# =========================================================================== + +# EXISTING (line 44): +# from src.services.apikey_service import get_decrypted_api_key + +# ADD after it: +# from src.services.apikey_service import get_api_key_record +# from src.services.oauth_codex_service import get_fresh_token +# from src.config.oauth_constants import ( +# CODEX_API_BASE, +# CODEX_ORIGINATOR, +# CODEX_USER_AGENT, +# ) + + +# =========================================================================== +# CHANGE 2: Replace lines 135-182 in _create_llm_agent() +# =========================================================================== + +# --- BEFORE (lines 135-182) --- +# +# # Get API key from api_key_id +# api_key = None +# +# # Get API key from api_key_id +# if hasattr(agent, "api_key_id") and agent.api_key_id: +# decrypted_key = get_decrypted_api_key(self.db, agent.api_key_id) +# if decrypted_key: +# logger.info(f"Using stored API key for agent {agent.name}") +# api_key = decrypted_key +# else: +# logger.error(f"Stored API key 
not found for agent {agent.name}") +# raise ValueError( +# f"API key with ID {agent.api_key_id} not found or inactive" +# ) +# else: +# # Check if there is an API key in the config (temporary field) +# config_api_key = agent.config.get("api_key") if agent.config else None +# if config_api_key: +# logger.info(f"Using config API key for agent {agent.name}") +# # Check if it is a UUID of a stored key +# try: +# key_id = uuid.UUID(config_api_key) +# decrypted_key = get_decrypted_api_key(self.db, key_id) +# if decrypted_key: +# logger.info("Config API key is a valid reference") +# api_key = decrypted_key +# else: +# # Use the key directly +# api_key = config_api_key +# except (ValueError, TypeError): +# # It is not a UUID, use directly +# api_key = config_api_key +# else: +# logger.error(f"No API key configured for agent {agent.name}") +# raise ValueError( +# f"Agent {agent.name} does not have a configured API key" +# ) +# +# return ( +# LlmAgent( +# name=agent.name, +# model=LiteLlm(model=agent.model, api_key=api_key), +# instruction=formatted_prompt, +# description=agent.description, +# tools=all_tools, +# ), +# mcp_exit_stack, +# ) + + +# --- AFTER (replace lines 135-182 with this) --- +# +# # Get API key / OAuth token for LLM authentication +# api_key = None +# model_name = agent.model +# litellm_kwargs = {} +# +# if hasattr(agent, "api_key_id") and agent.api_key_id: +# # Try to get the full key record to check auth_type +# key_record = get_api_key_record(self.db, agent.api_key_id) +# +# if key_record and key_record.auth_type == "oauth_codex": +# # === OAuth Codex Flow === +# # get_fresh_token handles expiry check and auto-refresh +# # Uses SELECT FOR UPDATE for thread-safe multi-tenant operation +# access_token, account_id = get_fresh_token( +# self.db, key_record.id +# ) +# api_key = access_token +# +# # Remap model name: "chatgpt/gpt-5.3-codex" -> "openai/gpt-5.3-codex" +# # We use openai/ prefix because chatgpt/ provider IGNORES api_key +# # parameter and reads 
from a global auth.json file (not multi-tenant safe). +# # With openai/ prefix, api_key is used directly as Bearer token. +# if model_name.startswith("chatgpt/"): +# model_name = "openai/" + model_name[len("chatgpt/"):] +# +# litellm_kwargs = { +# "api_base": CODEX_API_BASE, +# "extra_headers": { +# "ChatGPT-Account-Id": account_id, +# "originator": CODEX_ORIGINATOR, +# "User-Agent": CODEX_USER_AGENT, +# "accept": "text/event-stream", +# }, +# } +# logger.info(f"Using OAuth Codex token for agent {agent.name}") +# +# elif key_record: +# # === Standard API Key Flow (UNCHANGED behavior) === +# decrypted_key = get_decrypted_api_key(self.db, agent.api_key_id) +# if decrypted_key: +# logger.info(f"Using stored API key for agent {agent.name}") +# api_key = decrypted_key +# else: +# logger.error(f"Stored API key not found for agent {agent.name}") +# raise ValueError( +# f"API key with ID {agent.api_key_id} not found or inactive" +# ) +# else: +# logger.error(f"API key record not found for agent {agent.name}") +# raise ValueError( +# f"API key with ID {agent.api_key_id} not found or inactive" +# ) +# else: +# # === Config fallback (UNCHANGED behavior) === +# config_api_key = agent.config.get("api_key") if agent.config else None +# if config_api_key: +# logger.info(f"Using config API key for agent {agent.name}") +# try: +# key_id = uuid.UUID(config_api_key) +# decrypted_key = get_decrypted_api_key(self.db, key_id) +# if decrypted_key: +# logger.info("Config API key is a valid reference") +# api_key = decrypted_key +# else: +# api_key = config_api_key +# except (ValueError, TypeError): +# api_key = config_api_key +# else: +# logger.error(f"No API key configured for agent {agent.name}") +# raise ValueError( +# f"Agent {agent.name} does not have a configured API key" +# ) +# +# return ( +# LlmAgent( +# name=agent.name, +# model=LiteLlm(model=model_name, api_key=api_key, **litellm_kwargs), +# instruction=formatted_prompt, +# description=agent.description, +# tools=all_tools, +# 
), +# mcp_exit_stack, +# ) + + +# =========================================================================== +# HOW IT WORKS — Technical Explanation +# =========================================================================== +# +# For auth_type == "api_key" (existing behavior): +# - get_decrypted_api_key() returns the Fernet-decrypted API key string +# - LiteLlm(model="openai/gpt-4o", api_key="sk-...") +# - litellm_kwargs is empty -> no api_base or extra_headers +# - 100% identical to current production behavior +# +# For auth_type == "oauth_codex" (new behavior): +# - get_fresh_token() returns (access_token, account_id) +# - Checks token expiry, auto-refreshes if needed +# - Uses SELECT FOR UPDATE for thread-safe concurrent access +# - Model is remapped: "chatgpt/gpt-5.3-codex" -> "openai/gpt-5.3-codex" +# - We use openai/ prefix because chatgpt/ provider in LiteLLM +# IGNORES the api_key parameter (reads from global auth.json) +# - openai/ prefix uses api_key directly as Authorization: Bearer header +# - api_base overrides endpoint to chatgpt.com/backend-api/codex +# - extra_headers adds ChatGPT-Account-Id and originator +# - LiteLlm(model="openai/gpt-5.3-codex", api_key=token, +# api_base="https://chatgpt.com/backend-api/codex", +# extra_headers={...}) +# - Google ADK's LiteLlm stores **kwargs in _additional_args +# and merges them into every litellm.acompletion() call +# - Each tenant gets their own LiteLlm instance = zero shared state +# +# Config fallback path: +# - Unchanged — still resolves api_key from agent.config +# - Only handles static API keys (no OAuth in config fallback) diff --git a/implementation/processor/src/services/apikey_service_diff.py b/implementation/processor/src/services/apikey_service_diff.py new file mode 100644 index 0000000..57ab537 --- /dev/null +++ b/implementation/processor/src/services/apikey_service_diff.py @@ -0,0 +1,120 @@ +""" +API Key service changes for OAuth Codex support. 
+ +Apply to: evo-ai-processor-community/src/services/apikey_service.py + +3 changes: modify create_api_key, modify get_decrypted_api_key, add get_api_key_record. +All other functions (get_api_key, get_api_keys_by_client, update_api_key, delete_api_key) +remain UNCHANGED. +""" + + +# =========================================================================== +# CHANGE 1: Modify create_api_key signature and body +# =========================================================================== + +# BEFORE (line 42-71): +# def create_api_key( +# db: Session, client_id: uuid.UUID, name: str, provider: str, key_value: str +# ) -> ApiKey: +# """Create a new encrypted API key""" +# try: +# encrypted = encrypt_api_key(key_value) +# api_key = ApiKey( +# client_id=client_id, +# name=name, +# provider=provider, +# encrypted_key=encrypted, +# is_active=True, +# ) +# ... + +# AFTER: +# def create_api_key( +# db: Session, +# client_id: uuid.UUID, +# name: str, +# provider: str, +# key_value: str = None, +# auth_type: str = "api_key", +# ) -> ApiKey: +# """Create a new encrypted API key or OAuth Codex connection""" +# try: +# if auth_type == "api_key": +# if not key_value: +# raise HTTPException( +# status_code=status.HTTP_400_BAD_REQUEST, +# detail="key_value is required for api_key auth type", +# ) +# encrypted = encrypt_api_key(key_value) +# else: +# encrypted = None # OAuth keys don't store a static API key +# +# api_key = ApiKey( +# client_id=client_id, +# name=name, +# provider=provider, +# encrypted_key=encrypted, +# auth_type=auth_type, +# is_active=(auth_type == "api_key"), # OAuth keys start inactive +# ) +# ... 
(rest of function unchanged) + + +# =========================================================================== +# CHANGE 2: Modify get_decrypted_api_key to handle OAuth keys +# =========================================================================== + +# BEFORE (line 128-138): +# def get_decrypted_api_key(db: Session, key_id: uuid.UUID) -> Optional[str]: +# """Get the decrypted value of an API key""" +# try: +# key = get_api_key(db, key_id) +# if not key or not key.is_active: +# logger.warning(f"API key {key_id} not found or inactive") +# return None +# return decrypt_api_key(key.encrypted_key) +# except Exception as e: +# logger.error(f"Error decrypting API key {key_id}: {str(e)}") +# return None + +# AFTER: +# def get_decrypted_api_key(db: Session, key_id: uuid.UUID) -> Optional[str]: +# """Get the decrypted value of an API key. +# Returns None for OAuth keys (they use a different auth path in AgentBuilder). +# """ +# try: +# key = get_api_key(db, key_id) +# if not key or not key.is_active: +# logger.warning(f"API key {key_id} not found or inactive") +# return None +# if key.auth_type == "oauth_codex": +# return None # OAuth keys use get_fresh_token() in AgentBuilder +# return decrypt_api_key(key.encrypted_key) +# except Exception as e: +# logger.error(f"Error decrypting API key {key_id}: {str(e)}") +# return None + + +# =========================================================================== +# CHANGE 3: Add new function get_api_key_record (after get_decrypted_api_key) +# =========================================================================== + +# ADD after get_decrypted_api_key (line ~139): +# +# def get_api_key_record(db: Session, key_id: uuid.UUID) -> Optional[ApiKey]: +# """Get the full ApiKey record for auth_type checking in AgentBuilder. +# +# Unlike get_decrypted_api_key which returns the decrypted key string, +# this returns the full ORM object so the caller can check auth_type +# and route to the appropriate authentication path. 
+# """ +# try: +# key = get_api_key(db, key_id) +# if not key or not key.is_active: +# logger.warning(f"API key record {key_id} not found or inactive") +# return None +# return key +# except Exception as e: +# logger.error(f"Error getting API key record {key_id}: {str(e)}") +# return None diff --git a/implementation/processor/src/services/oauth_codex_service.py b/implementation/processor/src/services/oauth_codex_service.py new file mode 100644 index 0000000..04a67b6 --- /dev/null +++ b/implementation/processor/src/services/oauth_codex_service.py @@ -0,0 +1,425 @@ +""" +OAuth Codex service — v2 (post debug sweep fixes). + +REPLACES: oauth_codex_service.py + +Fixes applied from Debug Sweep (Phase 8): +- [HIGH] device_code stored server-side in pending oauth_data (Agent 3, Item 13) +- [HIGH] get_fresh_token wrapped in try/finally with db.rollback() (Agent 2, Items 2,5,9) +- [HIGH] db.refresh(key) after FOR UPDATE lock (Agent 2, Item 3) +- [HIGH] 401 handler restructured — single transaction, no double-commit (Agent 2, Items 2,10) +- [MEDIUM] Name validation in initiate_device_code_flow (Agent 3, Item 12) +- [MEDIUM] Log message sanitized in _extract_account_id (Agent 1, Item 1) +""" + +import time +import logging +import uuid +from typing import Tuple, Optional + +import httpx +import jwt + +from sqlalchemy.orm import Session + +from src.models.models import ApiKey +from src.utils.crypto import encrypt_oauth_data, decrypt_oauth_data +from src.config.oauth_constants import ( + CODEX_CLIENT_ID, + CODEX_TOKEN_URL, + CODEX_DEVICE_CODE_URL, + CODEX_DEVICE_POLL_URL, + CODEX_DEVICE_VERIFY_URL, + CODEX_SCOPES, + CODEX_TOKEN_REFRESH_BUFFER_SECONDS, + CODEX_JWT_AUTH_CLAIM, + CODEX_JWT_ACCOUNT_ID_KEY, + CODEX_API_BASE, + CODEX_ORIGINATOR, + CODEX_USER_AGENT, +) +from src.schemas.schemas import ( + OAuthDeviceCodeResponse, + OAuthDevicePollResponse, + OAuthStatusResponse, +) + +logger = logging.getLogger(__name__) + +_HTTP_TIMEOUT = 30.0 + + +def _extract_account_id(id_token: 
str) -> Optional[str]: + """Extract chatgpt_account_id from JWT claims. + + Note: verify_signature=False is intentional and safe here. We are only extracting + the account_id metadata field from a token received over TLS directly from + auth.openai.com. We do NOT use this data for authentication decisions — the + token itself is passed as-is to the OpenAI API which validates it server-side. + Verifying the signature would require fetching OpenAI's JWKS endpoint on every + call, adding latency and a network dependency with no security benefit. + """ + try: + claims = jwt.decode(id_token, options={"verify_signature": False}) # nosec: see docstring + auth_claims = claims.get(CODEX_JWT_AUTH_CLAIM) + if isinstance(auth_claims, dict): + account_id = auth_claims.get(CODEX_JWT_ACCOUNT_ID_KEY) + if isinstance(account_id, str) and account_id: + return account_id + return None + except Exception: + # FIX Agent 1 Item 1: Don't log str(e) — JWT lib may include token fragments + logger.warning("Failed to parse id_token for account_id extraction") + return None + + +def _extract_token_expiry(access_token: str) -> float: + """Extract expiration timestamp from access_token JWT. Falls back to now+3600. + + Note: verify_signature=False is intentional — same rationale as _extract_account_id. + We only read the 'exp' claim for cache TTL purposes, not for security decisions. + """ + try: + claims = jwt.decode(access_token, options={"verify_signature": False}) # nosec: see docstring + exp = claims.get("exp") + if exp: + return float(exp) + except Exception: + pass + return time.time() + 3600 + + +# --------------------------------------------------------------------------- +# 1. INITIATE DEVICE CODE FLOW +# --------------------------------------------------------------------------- + +def initiate_device_code_flow( + db: Session, client_id: uuid.UUID, name: str +) -> OAuthDeviceCodeResponse: + """Start OAuth device code flow. 
Creates a pending ApiKey record.""" + + # FIX Agent 3 Item 12: Validate name is not empty/whitespace + if not name or not name.strip(): + raise ValueError("Name is required to start the OAuth flow") + + with httpx.Client(timeout=_HTTP_TIMEOUT) as client: + resp = client.post( + CODEX_DEVICE_CODE_URL, + json={"client_id": CODEX_CLIENT_ID}, + ) + resp.raise_for_status() + data = resp.json() + + device_code = data.get("device_auth_id") or data.get("device_code") + user_code = data.get("user_code") + interval = data.get("interval", 5) + + if not device_code or not user_code: + raise ValueError("Invalid response from OpenAI device code endpoint") + + # FIX Agent 3 Item 13: Store device_code server-side in pending oauth_data + # so poll_device_code can retrieve it without the frontend sending it + pending_oauth = encrypt_oauth_data({"pending_device_code": device_code}) + + pending_key = ApiKey( + id=uuid.uuid4(), + client_id=client_id, + name=name.strip(), + provider="openai-codex", + auth_type="oauth_codex", + encrypted_key=None, + oauth_data=pending_oauth, # FIX: now stores pending device_code + is_active=False, + ) + db.add(pending_key) + db.commit() + db.refresh(pending_key) + + logger.info(f"OAuth device code flow initiated for client {client_id}, key_id={pending_key.id}") + + return OAuthDeviceCodeResponse( + user_code=user_code, + verification_uri=CODEX_DEVICE_VERIFY_URL, + expires_in=900, + interval=interval, + key_id=pending_key.id, + ) + + +# --------------------------------------------------------------------------- +# 2. POLL DEVICE CODE +# --------------------------------------------------------------------------- + +def poll_device_code( + db: Session, key_id: uuid.UUID +) -> OAuthDevicePollResponse: + """Poll OpenAI for device code authorization status. + + FIX Agent 3 Item 13: device_code is now read from server-side storage + instead of being passed from the frontend. 
+ """ + key = db.query(ApiKey).filter(ApiKey.id == key_id).first() + if not key: + return OAuthDevicePollResponse(status="error", message="Key not found") + + # Already completed + if key.is_active and key.oauth_data: + stored = decrypt_oauth_data(key.oauth_data) + if "access_token" in stored: + return OAuthDevicePollResponse(status="complete", key_id=key_id) + + # FIX Agent 3 Item 13: Read device_code from server-side storage + device_code = None + if key.oauth_data: + stored = decrypt_oauth_data(key.oauth_data) + device_code = stored.get("pending_device_code") + + if not device_code: + return OAuthDevicePollResponse( + status="error", key_id=key_id, message="Missing device code" + ) + + # Poll OpenAI + with httpx.Client(timeout=_HTTP_TIMEOUT) as client: + resp = client.post( + CODEX_DEVICE_POLL_URL, + json={ + "client_id": CODEX_CLIENT_ID, + "device_auth_id": device_code, + }, + ) + + if resp.status_code in (403, 428): + return OAuthDevicePollResponse(status="pending", key_id=key_id) + + if resp.status_code == 410: + key.is_active = False + db.commit() + return OAuthDevicePollResponse( + status="expired", key_id=key_id, message="Device code expired" + ) + + if resp.status_code != 200: + return OAuthDevicePollResponse( + status="error", key_id=key_id, + message=f"Unexpected status {resp.status_code}", + ) + + # Success — extract authorization_code and exchange for tokens + poll_data = resp.json() + authorization_code = poll_data.get("authorization_code") + code_verifier = poll_data.get("code_verifier") + + if not authorization_code: + return OAuthDevicePollResponse( + status="error", key_id=key_id, message="No authorization_code in response" + ) + + with httpx.Client(timeout=_HTTP_TIMEOUT) as client: + token_resp = client.post( + CODEX_TOKEN_URL, + data={ + "grant_type": "authorization_code", + "code": authorization_code, + "client_id": CODEX_CLIENT_ID, + "code_verifier": code_verifier or "", + "redirect_uri": "http://localhost:1455/auth/callback", + }, + 
headers={"Content-Type": "application/x-www-form-urlencoded"}, + ) + token_resp.raise_for_status() + tokens = token_resp.json() + + access_token = tokens.get("access_token") + refresh_token = tokens.get("refresh_token") + id_token = tokens.get("id_token", "") + + if not access_token or not refresh_token: + return OAuthDevicePollResponse( + status="error", key_id=key_id, message="Missing tokens in response" + ) + + account_id = _extract_account_id(id_token) or _extract_account_id(access_token) or "" + expires_at = _extract_token_expiry(access_token) + + oauth_data = { + "access_token": access_token, + "refresh_token": refresh_token, + "id_token": id_token, + "expires_at": expires_at, + "account_id": account_id, + "plan_type": "plus", + } + key.oauth_data = encrypt_oauth_data(oauth_data) + key.is_active = True + db.commit() + + logger.info(f"OAuth Codex connected for key_id={key_id}, account={account_id}") + return OAuthDevicePollResponse(status="complete", key_id=key_id) + + +# --------------------------------------------------------------------------- +# 3. GET FRESH TOKEN (thread-safe, with all debug fixes) +# --------------------------------------------------------------------------- + +def get_fresh_token(db: Session, key_id: uuid.UUID) -> Tuple[str, str]: + """Get a valid access token for the given OAuth key. + + Thread-safe: uses SELECT FOR UPDATE with try/finally rollback. + Auto-refreshes expired tokens. + + FIX Agent 2 Items 2,3,5,9,10: Consolidated fix with try/finally, + db.refresh(), single-transaction 401 handling. 
+ """ + try: + # Row-level lock to prevent concurrent refresh + key = ( + db.query(ApiKey) + .filter(ApiKey.id == key_id, ApiKey.is_active == True) + .with_for_update() + .first() + ) + + if not key or not key.oauth_data: + raise ValueError(f"OAuth key {key_id} not found or not connected") + + # FIX Agent 2 Item 3: Force re-read after lock to bypass identity map cache + db.refresh(key) + + oauth = decrypt_oauth_data(key.oauth_data) + access_token = oauth.get("access_token", "") + refresh_token = oauth.get("refresh_token", "") + expires_at = oauth.get("expires_at", 0) + account_id = oauth.get("account_id", "") + + # Token still valid — return it + if expires_at > time.time() + CODEX_TOKEN_REFRESH_BUFFER_SECONDS: + db.commit() # Release FOR UPDATE lock + return access_token, account_id + + # --- Token expired or near-expiry: refresh --- + logger.info(f"Refreshing OAuth token for key_id={key_id}") + + try: + with httpx.Client(timeout=_HTTP_TIMEOUT) as client: + resp = client.post( + CODEX_TOKEN_URL, + data={ + "client_id": CODEX_CLIENT_ID, + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "scope": CODEX_SCOPES, + }, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + ) + resp.raise_for_status() + new_tokens = resp.json() + except httpx.HTTPStatusError as e: + if e.response.status_code == 401: + # FIX Agent 2 Item 2,10: Single transaction, no double-commit + key.is_active = False + db.commit() + raise ValueError( + f"OAuth refresh token revoked for key_id={key_id}. " + "User must re-authenticate via device code flow." 
+ ) + # Non-401 server error: try stale token grace period + if expires_at > time.time() - 300: + db.commit() + logger.warning(f"Token refresh got {e.response.status_code} for key_id={key_id}, using stale token") + return access_token, account_id + raise + except httpx.HTTPError: + # Network error — return stale token if within grace period + if expires_at > time.time() - 300: + db.commit() + logger.warning(f"Token refresh network error for key_id={key_id}, using stale token") + return access_token, account_id + raise ValueError(f"Token refresh failed for key_id={key_id} and token is expired") + + # Update stored tokens + new_access = new_tokens.get("access_token", access_token) + new_refresh = new_tokens.get("refresh_token", refresh_token) + new_id_token = new_tokens.get("id_token", oauth.get("id_token", "")) + new_expires_at = _extract_token_expiry(new_access) + new_account_id = ( + _extract_account_id(new_id_token) + or _extract_account_id(new_access) + or account_id + ) + + updated_oauth = { + "access_token": new_access, + "refresh_token": new_refresh, + "id_token": new_id_token, + "expires_at": new_expires_at, + "account_id": new_account_id, + "plan_type": oauth.get("plan_type", "plus"), + } + key.oauth_data = encrypt_oauth_data(updated_oauth) + db.commit() + + logger.info(f"OAuth token refreshed for key_id={key_id}") + return new_access, new_account_id + + except Exception: + # FIX Agent 2 Items 2,5,9: Always release FOR UPDATE lock on any exception + db.rollback() + raise + + +# --------------------------------------------------------------------------- +# 4. 
GET OAUTH STATUS +# --------------------------------------------------------------------------- + +def get_oauth_status(db: Session, key_id: uuid.UUID) -> OAuthStatusResponse: + """Get connection status for an OAuth Codex key.""" + key = db.query(ApiKey).filter(ApiKey.id == key_id).first() + + if not key or key.auth_type != "oauth_codex": + return OAuthStatusResponse(key_id=key_id, connected=False) + + if not key.oauth_data or not key.is_active: + return OAuthStatusResponse(key_id=key_id, connected=False) + + oauth = decrypt_oauth_data(key.oauth_data) + + # Pending keys (only have pending_device_code) are not "connected" + if "access_token" not in oauth: + return OAuthStatusResponse(key_id=key_id, connected=False) + + from datetime import datetime, timezone + expires_at_dt = None + exp_ts = oauth.get("expires_at", 0) + if exp_ts: + expires_at_dt = datetime.fromtimestamp(exp_ts, tz=timezone.utc) + + return OAuthStatusResponse( + key_id=key_id, + connected=True, + expires_at=expires_at_dt, + account_id=oauth.get("account_id"), + plan_type=oauth.get("plan_type"), + ) + + +# --------------------------------------------------------------------------- +# 5. REVOKE OAUTH +# --------------------------------------------------------------------------- + +def revoke_oauth(db: Session, key_id: uuid.UUID) -> bool: + """Revoke OAuth connection. + + Deletes the ApiKey record entirely instead of setting oauth_data=None, + because the chk_auth_data CHECK constraint requires oauth_data IS NOT NULL + for auth_type='oauth_codex' rows. 
+ """ + key = db.query(ApiKey).filter(ApiKey.id == key_id).first() + if not key: + return False + + db.delete(key) + db.commit() + + logger.info(f"OAuth Codex revoked and deleted for key_id={key_id}") + return True diff --git a/implementation/processor/src/utils/crypto_diff.py b/implementation/processor/src/utils/crypto_diff.py new file mode 100644 index 0000000..00f54e4 --- /dev/null +++ b/implementation/processor/src/utils/crypto_diff.py @@ -0,0 +1,44 @@ +""" +Crypto extension for OAuth data encryption. + +Apply to: evo-ai-processor-community/src/utils/crypto.py + +ADD these two functions at the end of the existing file. +The existing encrypt_api_key() and decrypt_api_key() remain unchanged. +""" + + +# --- ADD AFTER existing functions (line 69) --- + +# import json <-- add to imports at top of file + +# def encrypt_oauth_data(oauth_dict: dict) -> str: +# """Encrypt OAuth token data (dict -> JSON -> Fernet encrypted string). +# +# Used to store OAuth access_token, refresh_token, id_token, account_id +# securely in the api_keys.oauth_data column. +# """ +# if not oauth_dict: +# return "" +# try: +# json_str = json.dumps(oauth_dict) +# return fernet.encrypt(json_str.encode()).decode() +# except Exception as e: +# logger.error(f"Error encrypting OAuth data: {str(e)}") +# raise +# +# +# def decrypt_oauth_data(encrypted_data: str) -> dict: +# """Decrypt Fernet-encrypted OAuth data back to dict. 
+# +# Returns dict with keys: access_token, refresh_token, id_token, +# expires_at, account_id, plan_type +# """ +# if not encrypted_data: +# return {} +# try: +# json_str = fernet.decrypt(encrypted_data.encode()).decode() +# return json.loads(json_str) +# except Exception as e: +# logger.error(f"Error decrypting OAuth data: {str(e)}") +# raise diff --git a/implementation/processor/tests/test_oauth_codex.py b/implementation/processor/tests/test_oauth_codex.py new file mode 100644 index 0000000..f8ff6b2 --- /dev/null +++ b/implementation/processor/tests/test_oauth_codex.py @@ -0,0 +1,468 @@ +""" +OAuth Codex integration tests. + +New file — add to: evo-ai-processor-community/tests/test_oauth_codex.py + +Run with: pytest tests/test_oauth_codex.py -v + +These tests mock external HTTP calls to auth.openai.com and verify +the full OAuth flow from device code initiation through token refresh. +""" + +import time +import json +import uuid +import pytest +from unittest.mock import patch, MagicMock + +from src.models.models import ApiKey, Client +from src.utils.crypto import encrypt_api_key, decrypt_api_key, encrypt_oauth_data, decrypt_oauth_data +from src.services.oauth_codex_service import ( + initiate_device_code_flow, + poll_device_code, + get_fresh_token, + get_oauth_status, + revoke_oauth, +) +from src.services.apikey_service import ( + create_api_key, + get_api_key, + get_decrypted_api_key, + get_api_key_record, +) + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + +@pytest.fixture +def test_client_record(db_session): + """Create a test client in the DB.""" + client = Client( + id=uuid.uuid4(), + name="Test Client", + email="test@example.com", + ) + db_session.add(client) + db_session.commit() + db_session.refresh(client) + return client + + +@pytest.fixture +def mock_device_code_response(): + """Mock response from OpenAI device code 
endpoint.""" + return { + "device_auth_id": "dev_auth_abc123", + "user_code": "ABCD-1234", + "interval": 5, + } + + +@pytest.fixture +def mock_poll_success_response(): + """Mock successful poll response with authorization_code.""" + return { + "authorization_code": "auth_code_xyz789", + "code_verifier": "verifier_abc", + "code_challenge": "challenge_def", + } + + +@pytest.fixture +def mock_token_response(): + """Mock token exchange response.""" + # Build a minimal JWT for testing (not cryptographically valid) + import base64 + header = base64.urlsafe_b64encode(json.dumps({"alg": "none"}).encode()).decode().rstrip("=") + payload_data = { + "exp": int(time.time()) + 3600, + "https://api.openai.com/auth": { + "chatgpt_account_id": "user-test-account-123", + }, + } + payload = base64.urlsafe_b64encode(json.dumps(payload_data).encode()).decode().rstrip("=") + fake_jwt = f"{header}.{payload}.fake_signature" + + return { + "access_token": fake_jwt, + "refresh_token": "refresh_token_test_456", + "id_token": fake_jwt, + "token_type": "Bearer", + "expires_in": 3600, + } + + +# --------------------------------------------------------------------------- +# Test: Crypto round-trip for OAuth data +# --------------------------------------------------------------------------- + +class TestCryptoOAuthData: + def test_encrypt_decrypt_oauth_data(self): + """OAuth data survives Fernet encrypt/decrypt round-trip.""" + original = { + "access_token": "eyJ_test_access_token", + "refresh_token": "test_refresh_token", + "id_token": "eyJ_test_id_token", + "expires_at": 1745000000.0, + "account_id": "user-abc123", + "plan_type": "plus", + } + encrypted = encrypt_oauth_data(original) + assert encrypted != json.dumps(original) # Not plaintext + assert isinstance(encrypted, str) + + decrypted = decrypt_oauth_data(encrypted) + assert decrypted == original + + def test_encrypt_empty_dict(self): + """Empty dict encrypts to empty string.""" + assert encrypt_oauth_data({}) == "" + + def 
test_decrypt_empty_string(self): + """Empty string decrypts to empty dict.""" + assert decrypt_oauth_data("") == {} + + +# --------------------------------------------------------------------------- +# Test: API Key creation with auth_type +# --------------------------------------------------------------------------- + +class TestApiKeyAuthType: + def test_create_standard_api_key(self, db_session, test_client_record): + """Standard API key creation with auth_type='api_key' works as before.""" + key = create_api_key( + db_session, test_client_record.id, "Test Key", "openai", "sk-test123" + ) + assert key.auth_type == "api_key" + assert key.encrypted_key is not None + assert key.is_active is True + assert key.oauth_data is None + + def test_create_api_key_requires_key_value(self, db_session, test_client_record): + """auth_type='api_key' without key_value raises error.""" + with pytest.raises(Exception): + create_api_key( + db_session, test_client_record.id, "No Key", "openai", + key_value=None, auth_type="api_key", + ) + + def test_create_oauth_key_no_encrypted_key(self, db_session, test_client_record): + """auth_type='oauth_codex' creates key without encrypted_key.""" + key = create_api_key( + db_session, test_client_record.id, "OAuth Key", "openai-codex", + key_value=None, auth_type="oauth_codex", + ) + assert key.auth_type == "oauth_codex" + assert key.encrypted_key is None + assert key.is_active is False # OAuth keys start inactive + + def test_get_decrypted_key_returns_none_for_oauth(self, db_session, test_client_record): + """get_decrypted_api_key returns None for OAuth keys.""" + key = create_api_key( + db_session, test_client_record.id, "OAuth Key", "openai-codex", + key_value=None, auth_type="oauth_codex", + ) + # Manually activate to test the auth_type check + key.is_active = True + key.oauth_data = encrypt_oauth_data({"access_token": "test"}) + db_session.commit() + + result = get_decrypted_api_key(db_session, key.id) + assert result is None # Should NOT 
return OAuth tokens via this function + + def test_get_api_key_record_returns_full_object(self, db_session, test_client_record): + """get_api_key_record returns full ORM object with auth_type.""" + key = create_api_key( + db_session, test_client_record.id, "Test Key", "openai", "sk-test" + ) + record = get_api_key_record(db_session, key.id) + assert record is not None + assert record.auth_type == "api_key" + assert record.id == key.id + + def test_existing_keys_default_to_api_key(self, db_session, test_client_record): + """Existing keys without auth_type get default 'api_key'.""" + key = create_api_key( + db_session, test_client_record.id, "Old Key", "openai", "sk-old" + ) + assert key.auth_type == "api_key" # server_default + + +# --------------------------------------------------------------------------- +# Test: Device code flow +# --------------------------------------------------------------------------- + +class TestDeviceCodeFlow: + @patch("src.services.oauth_codex_service.httpx.Client") + def test_initiate_device_code_flow( + self, mock_client_cls, db_session, test_client_record, mock_device_code_response + ): + """Device code flow creates pending key and returns user_code.""" + mock_resp = MagicMock() + mock_resp.status_code = 200 + mock_resp.json.return_value = mock_device_code_response + mock_resp.raise_for_status = MagicMock() + mock_client_cls.return_value.__enter__ = MagicMock(return_value=MagicMock(post=MagicMock(return_value=mock_resp))) + mock_client_cls.return_value.__exit__ = MagicMock(return_value=False) + + result = initiate_device_code_flow( + db_session, test_client_record.id, "My ChatGPT" + ) + + assert result.user_code == "ABCD-1234" + assert result.verification_uri == "https://auth.openai.com/codex/device" + assert result.key_id is not None + + # Verify pending key was created + key = get_api_key(db_session, result.key_id) + assert key is not None + assert key.auth_type == "oauth_codex" + assert key.is_active is False + assert key.provider 
== "openai-codex" + + @patch("src.services.oauth_codex_service.httpx.Client") + def test_poll_pending(self, mock_client_cls, db_session, test_client_record): + """Poll returns 'pending' when user hasn't authorized yet.""" + # Create pending key + key = ApiKey( + id=uuid.uuid4(), client_id=test_client_record.id, + name="Pending", provider="openai-codex", + auth_type="oauth_codex", is_active=False, + ) + db_session.add(key) + db_session.commit() + + mock_resp = MagicMock() + mock_resp.status_code = 403 # authorization_pending + mock_client_cls.return_value.__enter__ = MagicMock(return_value=MagicMock(post=MagicMock(return_value=mock_resp))) + mock_client_cls.return_value.__exit__ = MagicMock(return_value=False) + + result = poll_device_code(db_session, key.id, "dev_auth_abc") + assert result.status == "pending" + + @patch("src.services.oauth_codex_service.httpx.Client") + def test_poll_expired(self, mock_client_cls, db_session, test_client_record): + """Poll returns 'expired' when device code times out.""" + key = ApiKey( + id=uuid.uuid4(), client_id=test_client_record.id, + name="Expiring", provider="openai-codex", + auth_type="oauth_codex", is_active=False, + ) + db_session.add(key) + db_session.commit() + + mock_resp = MagicMock() + mock_resp.status_code = 410 # expired + mock_client_cls.return_value.__enter__ = MagicMock(return_value=MagicMock(post=MagicMock(return_value=mock_resp))) + mock_client_cls.return_value.__exit__ = MagicMock(return_value=False) + + result = poll_device_code(db_session, key.id, "dev_auth_abc") + assert result.status == "expired" + + +# --------------------------------------------------------------------------- +# Test: Token refresh +# --------------------------------------------------------------------------- + +class TestTokenRefresh: + def _create_oauth_key(self, db_session, client_id, expires_at): + """Helper to create an OAuth key with specific expiration.""" + oauth_data = { + "access_token": "old_access_token", + "refresh_token": 
"test_refresh_token", + "id_token": "test_id_token", + "expires_at": expires_at, + "account_id": "user-test-123", + "plan_type": "plus", + } + key = ApiKey( + id=uuid.uuid4(), client_id=client_id, + name="OAuth Key", provider="openai-codex", + auth_type="oauth_codex", + oauth_data=encrypt_oauth_data(oauth_data), + is_active=True, + ) + db_session.add(key) + db_session.commit() + return key + + def test_fresh_token_no_refresh_needed(self, db_session, test_client_record): + """Token that expires in >60s is returned directly without refresh.""" + key = self._create_oauth_key( + db_session, test_client_record.id, + expires_at=time.time() + 3600, # 1 hour from now + ) + + access_token, account_id = get_fresh_token(db_session, key.id) + assert access_token == "old_access_token" + assert account_id == "user-test-123" + + @patch("src.services.oauth_codex_service.httpx.Client") + def test_expired_token_triggers_refresh( + self, mock_client_cls, db_session, test_client_record + ): + """Token expiring within 60s triggers a refresh.""" + key = self._create_oauth_key( + db_session, test_client_record.id, + expires_at=time.time() + 30, # 30s from now — within buffer + ) + + import base64 + payload_data = { + "exp": int(time.time()) + 7200, + "https://api.openai.com/auth": {"chatgpt_account_id": "user-refreshed"}, + } + payload = base64.urlsafe_b64encode(json.dumps(payload_data).encode()).decode().rstrip("=") + header = base64.urlsafe_b64encode(json.dumps({"alg": "none"}).encode()).decode().rstrip("=") + new_jwt = f"{header}.{payload}.sig" + + mock_resp = MagicMock() + mock_resp.status_code = 200 + mock_resp.json.return_value = { + "access_token": new_jwt, + "refresh_token": "new_refresh_token", + "id_token": new_jwt, + } + mock_resp.raise_for_status = MagicMock() + mock_client_cls.return_value.__enter__ = MagicMock(return_value=MagicMock(post=MagicMock(return_value=mock_resp))) + mock_client_cls.return_value.__exit__ = MagicMock(return_value=False) + + access_token, account_id = 
get_fresh_token(db_session, key.id) + assert access_token == new_jwt + assert account_id == "user-refreshed" + + # Verify DB was updated + updated = decrypt_oauth_data(get_api_key(db_session, key.id).oauth_data) + assert updated["access_token"] == new_jwt + assert updated["refresh_token"] == "new_refresh_token" + + def test_missing_key_raises(self, db_session): + """get_fresh_token raises for non-existent key.""" + with pytest.raises(ValueError, match="not found"): + get_fresh_token(db_session, uuid.uuid4()) + + +# --------------------------------------------------------------------------- +# Test: OAuth status +# --------------------------------------------------------------------------- + +class TestOAuthStatus: + def test_connected_status(self, db_session, test_client_record): + """Connected OAuth key returns connected=True with details.""" + oauth_data = { + "access_token": "test", "refresh_token": "test", + "expires_at": time.time() + 3600, + "account_id": "user-abc", "plan_type": "plus", + } + key = ApiKey( + id=uuid.uuid4(), client_id=test_client_record.id, + name="Connected", provider="openai-codex", + auth_type="oauth_codex", + oauth_data=encrypt_oauth_data(oauth_data), + is_active=True, + ) + db_session.add(key) + db_session.commit() + + result = get_oauth_status(db_session, key.id) + assert result.connected is True + assert result.account_id == "user-abc" + assert result.plan_type == "plus" + + def test_disconnected_status(self, db_session, test_client_record): + """Inactive OAuth key returns connected=False.""" + key = ApiKey( + id=uuid.uuid4(), client_id=test_client_record.id, + name="Disconnected", provider="openai-codex", + auth_type="oauth_codex", is_active=False, + ) + db_session.add(key) + db_session.commit() + + result = get_oauth_status(db_session, key.id) + assert result.connected is False + + def test_standard_key_returns_not_connected(self, db_session, test_client_record): + """Standard API key returns connected=False for OAuth status.""" + 
+        key = create_api_key(
+            db_session, test_client_record.id, "Standard", "openai", "sk-test"
+        )
+        result = get_oauth_status(db_session, key.id)
+        assert result.connected is False
+
+
+# ---------------------------------------------------------------------------
+# Test: Revoke OAuth
+# ---------------------------------------------------------------------------
+
+class TestRevokeOAuth:
+    def test_revoke_deletes_key(self, db_session, test_client_record):
+        """Revoking deletes the ApiKey row entirely (chk_auth_data constraint)."""
+        oauth_data = {"access_token": "secret", "refresh_token": "secret"}
+        key = ApiKey(
+            id=uuid.uuid4(), client_id=test_client_record.id,
+            name="ToRevoke", provider="openai-codex",
+            auth_type="oauth_codex",
+            oauth_data=encrypt_oauth_data(oauth_data),
+            is_active=True,
+        )
+        db_session.add(key)
+        db_session.commit()
+
+        result = revoke_oauth(db_session, key.id)
+        assert result is True
+
+        # revoke_oauth deletes the record (oauth_data=None would violate chk_auth_data)
+        revoked = get_api_key(db_session, key.id)
+        assert revoked is None
+
+    def test_revoke_nonexistent_key(self, db_session):
+        """Revoking non-existent key returns False."""
+        result = revoke_oauth(db_session, uuid.uuid4())
+        assert result is False
+
+
+# ---------------------------------------------------------------------------
+# Test: Model name remapping
+# ---------------------------------------------------------------------------
+
+class TestModelRemapping:
+    def test_chatgpt_prefix_remapped(self):
+        """chatgpt/ prefix is remapped to openai/ for LiteLLM."""
+        model = "chatgpt/gpt-5.3-codex"
+        if model.startswith("chatgpt/"):
+            model = "openai/" + model[len("chatgpt/"):]
+        assert model == "openai/gpt-5.3-codex"
+
+    def test_openai_prefix_unchanged(self):
+        """openai/ prefix is not modified."""
+        model = "openai/gpt-4o"
+        if model.startswith("chatgpt/"):
+            model = "openai/" + model[len("chatgpt/"):]
+        assert model == "openai/gpt-4o"
+
+    def test_other_provider_unchanged(self):
+        """Non-OpenAI models are not
affected.""" + model = "anthropic/claude-3-5-sonnet-20241022" + if model.startswith("chatgpt/"): + model = "openai/" + model[len("chatgpt/"):] + assert model == "anthropic/claude-3-5-sonnet-20241022" + + +# --------------------------------------------------------------------------- +# Test: Migration backward compatibility +# --------------------------------------------------------------------------- + +class TestMigrationCompat: + def test_existing_keys_get_default_auth_type(self, db_session, test_client_record): + """Keys created without explicit auth_type get 'api_key' default.""" + key = create_api_key( + db_session, test_client_record.id, "Legacy", "openai", "sk-legacy" + ) + # Simulate reading a key that was created before the migration + record = get_api_key_record(db_session, key.id) + assert record.auth_type == "api_key" + assert record.oauth_data is None + assert record.encrypted_key is not None diff --git a/nginx/Dockerfile b/nginx/Dockerfile index 47d11be..61a1367 100644 --- a/nginx/Dockerfile +++ b/nginx/Dockerfile @@ -1,3 +1,15 @@ FROM nginx:alpine -COPY nginx.conf /etc/nginx/conf.d/default.conf + +COPY nginx.conf /etc/nginx/nginx.conf.template +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENV AUTH_UPSTREAM=evo_auth:3001 +ENV CRM_UPSTREAM=evo_crm:3000 +ENV CORE_UPSTREAM=evo_core:5555 +ENV PROCESSOR_UPSTREAM=evo_processor:8000 +ENV BOT_RUNTIME_UPSTREAM=evo_bot_runtime:8080 + EXPOSE 3030 + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/nginx/entrypoint.sh b/nginx/entrypoint.sh new file mode 100755 index 0000000..33aa77b --- /dev/null +++ b/nginx/entrypoint.sh @@ -0,0 +1,3 @@ +#!/bin/sh +envsubst '$AUTH_UPSTREAM $CRM_UPSTREAM $CORE_UPSTREAM $PROCESSOR_UPSTREAM $BOT_RUNTIME_UPSTREAM' < /etc/nginx/nginx.conf.template > /etc/nginx/conf.d/default.conf +exec nginx -g "daemon off;" diff --git a/nginx/nginx.conf b/nginx/nginx.conf index fa240f5..4cda6cc 100644 --- a/nginx/nginx.conf +++ b/nginx/nginx.conf @@ -22,11 +22,11 @@ server { # 
============================================================================= # Upstream targets as variables # ============================================================================= - set $auth_service http://evo_auth:3001; - set $evoai_service http://evo_core:5555; - set $crm_service http://evo_crm:3000; - set $processor_service http://evo_processor:8000; - set $bot_runtime_service http://evo_bot_runtime:8080; + set $auth_service http://${AUTH_UPSTREAM}; + set $evoai_service http://${CORE_UPSTREAM}; + set $crm_service http://${CRM_UPSTREAM}; + set $processor_service http://${PROCESSOR_UPSTREAM}; + set $bot_runtime_service http://${BOT_RUNTIME_UPSTREAM}; underscores_in_headers on; @@ -265,6 +265,18 @@ server { } # ============================================================================= + # OAuth Codex Routes → Processor Service + # ============================================================================= + location ~ ^/api/v1/agents/oauth { + proxy_pass $processor_service$request_uri; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto https; + proxy_set_header Authorization $http_authorization; + proxy_set_header x-client-id $http_x_client_id; + } + # EvoAI Core Service Routes # ============================================================================= location ~ ^/api/v1/(agents|folders|mcp-servers|custom-mcp-servers|custom-tools) { diff --git a/setup.sh b/setup.sh index f8d2c3c..be534ee 100755 --- a/setup.sh +++ b/setup.sh @@ -197,7 +197,7 @@ echo "" # Step 6: Seed Auth service (must be first) # --------------------------------------------------------------------------- info "Seeding Auth service (creating default account and user)..." 
-docker compose run --rm evo-auth bash -c "bundle exec rails db:prepare && bundle exec rails db:seed" +docker compose run --rm evo-auth sh -c "bundle exec rails db:prepare && bundle exec rails db:seed" success "Auth service seeded" echo "" @@ -206,7 +206,7 @@ echo "" # Step 7: Seed CRM service # --------------------------------------------------------------------------- info "Seeding CRM service (creating default inbox)..." -docker compose run --rm evo-crm bash -c "bundle exec rails db:prepare && bundle exec rails db:seed" +docker compose run --rm evo-crm sh -c "bundle exec rails db:prepare && bundle exec rails db:seed" success "CRM service seeded" echo ""