Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
be7b8c8
add byok modal, update user proto and model
kah-seng Jan 27, 2026
783f85e
Merge branch 'main' of https://github.com/PaperDebugger/paperdebugger…
kah-seng Jan 28, 2026
9603404
update CustomModel proto, model and mapper
kah-seng Jan 28, 2026
49ac78d
add button functions
kah-seng Jan 29, 2026
38d7d56
change CustomModel in user.proto to match SupportedModel in chat.proto
kah-seng Jan 30, 2026
3cb6692
allow custom model to be selected in chat
kah-seng Jan 30, 2026
83efbac
Merge branch 'main' into feat/byok
kah-seng Jan 30, 2026
9259e76
Merge branch 'main' into feat/byok
kah-seng Feb 3, 2026
6333acc
add select input, update model selection
kah-seng Feb 3, 2026
f693cf4
Merge branch 'main' into feat/byok
kah-seng Feb 14, 2026
adf8216
todo: fix gemini bad request
kah-seng Feb 15, 2026
ebbfb44
fix gemini chat params
kah-seng Feb 19, 2026
a7b7ff7
Update llmProvider
kah-seng Feb 21, 2026
110bcaf
Polish UI
kah-seng Feb 21, 2026
58e9aac
Fix defaults
kah-seng Feb 21, 2026
f807eaf
Change slug to text input, hide disabled models
kah-seng Mar 18, 2026
035e6da
Allow multiple models with same slugs
kah-seng Mar 18, 2026
c9b078a
Trim inputs, polish UI
kah-seng Mar 19, 2026
6981907
UI polish, do not send disabled models
kah-seng Mar 19, 2026
b6975ef
Merge branch 'staging' into feat/byok
kah-seng Mar 19, 2026
6031234
Make gen
kah-seng Mar 19, 2026
8bf9095
Update UI
kah-seng Mar 19, 2026
167416b
Add input validation error indicator
kah-seng Mar 20, 2026
d107f62
Remove sorting of models by name
kah-seng Mar 20, 2026
6242602
Add optional fields
kah-seng Mar 20, 2026
18a5557
Revert package-lock.json
kah-seng Mar 20, 2026
dc587af
Add baseUrl https validation
kah-seng Mar 20, 2026
bfeac21
Merge branch 'staging' into feat/byok
kah-seng Mar 26, 2026
8c11e21
make gen
kah-seng Mar 26, 2026
0022664
Resolve Copilot comments
kah-seng Mar 26, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 35 additions & 3 deletions internal/api/chat/create_conversation_message_stream_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import (
"paperdebugger/internal/models"
"paperdebugger/internal/services"
chatv2 "paperdebugger/pkg/gen/api/chat/v2"
"strings"

"github.com/google/uuid"
"github.com/openai/openai-go/v3"
Expand Down Expand Up @@ -276,9 +277,40 @@ func (s *ChatServerV2) CreateConversationMessageStream(
return s.sendStreamError(stream, err)
}

// Check if user has an API key for requested model
var llmProvider *models.LLMProviderConfig
var customModel *models.CustomModel
customModel = nil
for i := range settings.CustomModels {
if settings.CustomModels[i].Slug == modelSlug {
customModel = &settings.CustomModels[i]
}
}

// Usage is the same as ChatCompletion, just passing the stream parameter
llmProvider := &models.LLMProviderConfig{
APIKey: settings.OpenAIAPIKey,

if customModel == nil {
// User did not specify API key for this model
llmProvider = &models.LLMProviderConfig{
APIKey: "",
IsCustomModel: false,
}
} else {
customModel.BaseUrl = strings.ToLower(customModel.BaseUrl)

if strings.Contains(customModel.BaseUrl, "paperdebugger.com") {
customModel.BaseUrl = ""
}
if !strings.HasPrefix(customModel.BaseUrl, "https://") {
customModel.BaseUrl = strings.Replace(customModel.BaseUrl, "http://", "", 1)
customModel.BaseUrl = "https://" + customModel.BaseUrl
}

llmProvider = &models.LLMProviderConfig{
APIKey: customModel.APIKey,
Endpoint: customModel.BaseUrl,
IsCustomModel: true,
}
}

openaiChatHistory, inappChatHistory, err := s.aiClientV2.ChatCompletionStreamV2(ctx, stream, conversation.ID.Hex(), modelSlug, conversation.OpenaiChatHistoryCompletion, llmProvider)
Expand Down Expand Up @@ -307,7 +339,7 @@ func (s *ChatServerV2) CreateConversationMessageStream(
for i, bsonMsg := range conversation.InappChatHistory {
protoMessages[i] = mapper.BSONToChatMessageV2(bsonMsg)
}
title, err := s.aiClientV2.GetConversationTitleV2(ctx, protoMessages, llmProvider)
title, err := s.aiClientV2.GetConversationTitleV2(ctx, protoMessages, llmProvider, modelSlug)
if err != nil {
s.logger.Error("Failed to get conversation title", "error", err, "conversationID", conversation.ID.Hex())
return
Expand Down
32 changes: 16 additions & 16 deletions internal/api/chat/list_supported_models_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ package chat

import (
"context"
"strings"

"paperdebugger/internal/libs/contextutil"
chatv2 "paperdebugger/pkg/gen/api/chat/v2"
Expand Down Expand Up @@ -220,32 +219,33 @@ func (s *ChatServerV2) ListSupportedModels(
return nil, err
}

hasOwnAPIKey := strings.TrimSpace(settings.OpenAIAPIKey) != ""

var models []*chatv2.SupportedModel
for _, config := range allModels {
// Choose the appropriate slug based on whether user has their own API key.
//
// Some models are only available via OpenRouter; for those, slugOpenAI may be empty.
// In that case, keep using the OpenRouter slug to avoid returning an empty model slug.
slug := config.slugOpenRouter
if hasOwnAPIKey && strings.TrimSpace(config.slugOpenAI) != "" {
slug = config.slugOpenAI
}

for _, model := range settings.CustomModels {
models = append(models, &chatv2.SupportedModel{
Name: model.Name,
Slug: model.Slug,
TotalContext: int64(model.ContextWindow),
MaxOutput: int64(model.MaxOutput),
InputPrice: int64(model.InputPrice),
OutputPrice: int64(model.OutputPrice),
IsCustom: true,
})
}

for _, config := range allModels {
model := &chatv2.SupportedModel{
Name: config.name,
Slug: slug,
Slug: config.slugOpenRouter,
TotalContext: config.totalContext,
MaxOutput: config.maxOutput,
InputPrice: config.inputPrice,
OutputPrice: config.outputPrice,
}

// If model requires own key but user hasn't provided one, mark as disabled
if config.requireOwnKey && !hasOwnAPIKey {
model.Disabled = true
model.DisabledReason = stringPtr("Requires your own OpenAI API key. Configure it in Settings.")
if config.requireOwnKey {
continue
}

models = append(models, model)
Expand Down
47 changes: 45 additions & 2 deletions internal/api/mapper/user.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,26 +3,69 @@ package mapper
import (
"paperdebugger/internal/models"
userv1 "paperdebugger/pkg/gen/api/user/v1"

"go.mongodb.org/mongo-driver/v2/bson"
)

// MapProtoSettingsToModel converts the wire-format user settings into the
// persistence model, including the user's custom (BYOK) model list.
func MapProtoSettingsToModel(settings *userv1.Settings) *models.Settings {
	customModels := make([]models.CustomModel, len(settings.CustomModels))
	for i, m := range settings.CustomModels {
		// Reuse the client-supplied ID when it is a valid ObjectID hex string;
		// otherwise mint a fresh one (e.g. for newly created entries).
		id, err := bson.ObjectIDFromHex(m.Id)
		if err != nil {
			id = bson.NewObjectID()
		}

		customModels[i] = models.CustomModel{
			Id:            id,
			Slug:          m.Slug,
			Name:          m.Name,
			BaseUrl:       m.BaseUrl,
			APIKey:        m.ApiKey,
			ContextWindow: m.ContextWindow,
			MaxOutput:     m.MaxOutput,
			InputPrice:    m.InputPrice,
			OutputPrice:   m.OutputPrice,
		}
	}

	return &models.Settings{
		ShowShortcutsAfterSelection:  settings.ShowShortcutsAfterSelection,
		FullWidthPaperDebuggerButton: settings.FullWidthPaperDebuggerButton,
		EnableCitationSuggestion:     settings.EnableCitationSuggestion,
		FullDocumentRag:              settings.FullDocumentRag,
		ShowedOnboarding:             settings.ShowedOnboarding,
		OpenAIAPIKey:                 settings.OpenaiApiKey,
		CustomModels:                 customModels,
	}
}

// MapModelSettingsToProto converts the persistence-model user settings back
// into the wire format, including the user's custom (BYOK) model list.
func MapModelSettingsToProto(settings *models.Settings) *userv1.Settings {
	customModels := make([]*userv1.CustomModel, len(settings.CustomModels))
	for i, m := range settings.CustomModels {
		customModels[i] = &userv1.CustomModel{
			Id:            m.Id.Hex(),
			Slug:          m.Slug,
			Name:          m.Name,
			BaseUrl:       m.BaseUrl,
			ApiKey:        m.APIKey,
			ContextWindow: m.ContextWindow,
			MaxOutput:     m.MaxOutput,
			InputPrice:    m.InputPrice,
			OutputPrice:   m.OutputPrice,
		}
	}

	return &userv1.Settings{
		ShowShortcutsAfterSelection:  settings.ShowShortcutsAfterSelection,
		FullWidthPaperDebuggerButton: settings.FullWidthPaperDebuggerButton,
		EnableCitationSuggestion:     settings.EnableCitationSuggestion,
		FullDocumentRag:              settings.FullDocumentRag,
		ShowedOnboarding:             settings.ShowedOnboarding,
		OpenaiApiKey:                 settings.OpenAIAPIKey,
		CustomModels:                 customModels,
	}
}
9 changes: 6 additions & 3 deletions internal/models/llm_provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,13 @@ package models

// LLMProviderConfig holds the configuration for LLM API calls.
// If both Endpoint and APIKey are empty, the system default will be used.
// If IsCustomModel is true, the user-requested slug with its corresponding
// API key and endpoint should be used verbatim (no server-side fallbacks).
type LLMProviderConfig struct {
	Endpoint      string // provider base URL; empty means "use system default"
	APIKey        string // user-supplied key; empty means "use server key"
	ModelName     string // optional model override
	IsCustomModel bool   // true when the config comes from a user BYOK entry
}

// IsCustom returns true if the user has configured custom LLM provider settings.
Expand Down
25 changes: 19 additions & 6 deletions internal/models/user.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,26 @@ package models

import "go.mongodb.org/mongo-driver/v2/bson"

// CustomModel is a user-configured, bring-your-own-key (BYOK) model entry
// persisted as part of the user's settings.
type CustomModel struct {
Id bson.ObjectID `bson:"_id"` // unique ID; regenerated by the mapper if the client supplies an invalid hex string
Slug string `bson:"slug"` // model slug passed to the LLM provider API (duplicate slugs across entries are allowed)
Name string `bson:"name"` // human-readable display name
BaseUrl string `bson:"base_url"` // provider endpoint base URL
APIKey string `bson:"api_key"` // user's API key for this provider
ContextWindow int32 `bson:"context_window"` // total context size in tokens
MaxOutput int32 `bson:"max_output"` // maximum output tokens
InputPrice int32 `bson:"input_price"` // input token price (units assumed per-million tokens — TODO confirm)
OutputPrice int32 `bson:"output_price"` // output token price (units assumed per-million tokens — TODO confirm)
}

type Settings struct {
ShowShortcutsAfterSelection bool `bson:"show_shortcuts_after_selection"`
FullWidthPaperDebuggerButton bool `bson:"full_width_paper_debugger_button"`
EnableCitationSuggestion bool `bson:"enable_citation_suggestion"`
FullDocumentRag bool `bson:"full_document_rag"`
ShowedOnboarding bool `bson:"showed_onboarding"`
OpenAIAPIKey string `bson:"openai_api_key"`
ShowShortcutsAfterSelection bool `bson:"show_shortcuts_after_selection"`
FullWidthPaperDebuggerButton bool `bson:"full_width_paper_debugger_button"`
EnableCitationSuggestion bool `bson:"enable_citation_suggestion"`
FullDocumentRag bool `bson:"full_document_rag"`
ShowedOnboarding bool `bson:"showed_onboarding"`
OpenAIAPIKey string `bson:"openai_api_key"`
CustomModels []CustomModel `bson:"custom_models"`
}

type User struct {
Expand Down
22 changes: 12 additions & 10 deletions internal/services/toolkit/client/client_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,18 +32,20 @@ func (a *AIClientV2) GetOpenAIClient(llmConfig *models.LLMProviderConfig) *opena
var Endpoint string = llmConfig.Endpoint
var APIKey string = llmConfig.APIKey

if Endpoint == "" {
if APIKey != "" {
// User provided their own API key, use the OpenAI-compatible endpoint
Endpoint = a.cfg.OpenAIBaseURL // standard openai base url
} else {
// suffix needed for cloudflare gateway
Endpoint = a.cfg.InferenceBaseURL + "/openrouter"
if !llmConfig.IsCustomModel {
if Endpoint == "" {
if APIKey != "" {
// User provided their own API key, use the OpenAI-compatible endpoint
Endpoint = a.cfg.OpenAIBaseURL // standard openai base url
} else {
// suffix needed for cloudflare gateway
Endpoint = a.cfg.InferenceBaseURL + "/openrouter"
}
}
}

if APIKey == "" {
APIKey = a.cfg.InferenceAPIKey
if APIKey == "" {
APIKey = a.cfg.InferenceAPIKey
}
}

opts := []option.RequestOption{
Expand Down
2 changes: 1 addition & 1 deletion internal/services/toolkit/client/completion_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream
}()

oaiClient := a.GetOpenAIClient(llmProvider)
params := getDefaultParamsV2(modelSlug, a.toolCallHandler.Registry)
params := getDefaultParamsV2(modelSlug, a.toolCallHandler.Registry, llmProvider.IsCustomModel)

for {
params.Messages = openaiChatHistory
Expand Down
10 changes: 8 additions & 2 deletions internal/services/toolkit/client/get_conversation_title_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ import (
"github.com/samber/lo"
)

func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistory []*chatv2.Message, llmProvider *models.LLMProviderConfig) (string, error) {
func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistory []*chatv2.Message, llmProvider *models.LLMProviderConfig, modelSlug string) (string, error) {
messages := lo.Map(inappChatHistory, func(message *chatv2.Message, _ int) string {
if _, ok := message.Payload.MessageType.(*chatv2.MessagePayload_Assistant); ok {
return fmt.Sprintf("Assistant: %s", message.Payload.GetAssistant().GetContent())
Expand All @@ -29,7 +29,13 @@ func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistor
message := strings.Join(messages, "\n")
message = fmt.Sprintf("%s\nBased on above conversation, generate a short, clear, and descriptive title that summarizes the main topic or purpose of the discussion. The title should be concise, specific, and use natural language. Avoid vague or generic titles. Use abbreviation and short words if possible. Use 3-5 words if possible. Give me the title only, no other text including any other words.", message)

_, resp, err := a.ChatCompletionV2(ctx, "gpt-5-nano", OpenAIChatHistory{
// Default model if user is not using their own
modelToUse := "gpt-5-nano"
if llmProvider.IsCustomModel {
modelToUse = modelSlug
}

_, resp, err := a.ChatCompletionV2(ctx, modelToUse, OpenAIChatHistory{
openai.SystemMessage("You are a helpful assistant that generates a title for a conversation."),
openai.UserMessage(message),
}, llmProvider)
Expand Down
14 changes: 13 additions & 1 deletion internal/services/toolkit/client/utils_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ func appendAssistantTextResponseV2(openaiChatHistory *OpenAIChatHistory, inappCh
})
}

func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2) openaiv3.ChatCompletionNewParams {
func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2, isCustomModel bool) openaiv3.ChatCompletionNewParams {
var reasoningModels = []string{
"gpt-5",
"gpt-5-mini",
Expand All @@ -66,6 +66,18 @@ func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2)
"o1",
"codex-mini-latest",
}

// Other model providers generally do not support the Store param
if isCustomModel {
return openaiv3.ChatCompletionNewParams{
Model: modelSlug,
Temperature: openaiv3.Float(0.7),
MaxCompletionTokens: openaiv3.Int(4000),
Tools: toolRegistry.GetTools(),
ParallelToolCalls: openaiv3.Bool(true),
}
}

for _, model := range reasoningModels {
if strings.Contains(modelSlug, model) {
return openaiv3.ChatCompletionNewParams{
Expand Down
20 changes: 14 additions & 6 deletions pkg/gen/api/chat/v2/chat.pb.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading