From 6d5b3897f94d9d165877497ae3f1ae8e24026129 Mon Sep 17 00:00:00 2001 From: wjiayis Date: Sat, 24 Jan 2026 20:24:16 +0800 Subject: [PATCH 1/3] fix: byok compatibility --- internal/services/toolkit/client/client_v2.go | 21 ++++++++++++++++--- internal/services/toolkit/client/utils_v2.go | 11 ++++++++-- 2 files changed, 27 insertions(+), 5 deletions(-) diff --git a/internal/services/toolkit/client/client_v2.go b/internal/services/toolkit/client/client_v2.go index 8279b6bc..447165e6 100644 --- a/internal/services/toolkit/client/client_v2.go +++ b/internal/services/toolkit/client/client_v2.go @@ -63,11 +63,26 @@ func NewAIClientV2( logger *logger.Logger, ) *AIClientV2 { database := db.Database("paperdebugger") + + llmProvider := &models.LLMProviderConfig{ + APIKey: cfg.OpenAIAPIKey, + } + + var baseUrl string + var apiKey string + if llmProvider != nil && llmProvider.IsCustom() { + baseUrl = cfg.InferenceBaseURL + "/openai" + apiKey = cfg.OpenAIAPIKey + } else { + baseUrl = cfg.InferenceBaseURL + "/openrouter" + apiKey = cfg.InferenceAPIKey + } + oaiClient := openai.NewClient( - option.WithBaseURL(cfg.InferenceBaseURL+"/openrouter"), - option.WithAPIKey(cfg.InferenceAPIKey), + option.WithBaseURL(baseUrl), + option.WithAPIKey(apiKey), ) - CheckOpenAIWorksV2(oaiClient, logger) + CheckOpenAIWorksV2(oaiClient, llmProvider, logger) toolRegistry := initializeToolkitV2(db, projectService, cfg, logger) toolCallHandler := handler.NewToolCallHandlerV2(toolRegistry) diff --git a/internal/services/toolkit/client/utils_v2.go b/internal/services/toolkit/client/utils_v2.go index e502cb21..f48cbe3e 100644 --- a/internal/services/toolkit/client/utils_v2.go +++ b/internal/services/toolkit/client/utils_v2.go @@ -12,6 +12,7 @@ import ( "paperdebugger/internal/libs/db" "paperdebugger/internal/libs/logger" "paperdebugger/internal/services" + "paperdebugger/internal/models" "paperdebugger/internal/services/toolkit/registry" "paperdebugger/internal/services/toolkit/tools/xtramcp" chatv2 
"paperdebugger/pkg/gen/api/chat/v2" @@ -87,13 +88,19 @@ func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2) } } -func CheckOpenAIWorksV2(oaiClient openaiv3.Client, logger *logger.Logger) { +func CheckOpenAIWorksV2(oaiClient openaiv3.Client, llmProvider *models.LLMProviderConfig, logger *logger.Logger) { logger.Info("[AI Client V2] checking if openai client works") + + var model = "openai/gpt-5-nano" + if llmProvider != nil && llmProvider.IsCustom() { + model = model[strings.LastIndex(model, "/")+1:] + } + chatCompletion, err := oaiClient.Chat.Completions.New(context.TODO(), openaiv3.ChatCompletionNewParams{ Messages: []openaiv3.ChatCompletionMessageParamUnion{ openaiv3.UserMessage("Say 'openai client works'"), }, - Model: "openai/gpt-5-nano", + Model: model, }) if err != nil { logger.Errorf("[AI Client V2] openai client does not work: %v", err) From 1128387ab0bfc9494c9e3cae69e171229a33fe7b Mon Sep 17 00:00:00 2001 From: wjiayis Date: Sun, 25 Jan 2026 15:13:30 +0800 Subject: [PATCH 2/3] refactor: improve byok compatibility fix based on comments --- internal/services/toolkit/client/client_v2.go | 8 ++++++-- .../services/toolkit/client/completion_v2.go | 3 ++- internal/services/toolkit/client/utils_v2.go | 19 +++++++++---------- 3 files changed, 17 insertions(+), 13 deletions(-) diff --git a/internal/services/toolkit/client/client_v2.go b/internal/services/toolkit/client/client_v2.go index 447165e6..eb83f793 100644 --- a/internal/services/toolkit/client/client_v2.go +++ b/internal/services/toolkit/client/client_v2.go @@ -70,19 +70,23 @@ func NewAIClientV2( var baseUrl string var apiKey string + var modelSlug string + if llmProvider != nil && llmProvider.IsCustom() { - baseUrl = cfg.InferenceBaseURL + "/openai" + baseUrl = cfg.OpenAIBaseURL apiKey = cfg.OpenAIAPIKey + modelSlug = "gpt-5-nano" } else { baseUrl = cfg.InferenceBaseURL + "/openrouter" apiKey = cfg.InferenceAPIKey + modelSlug = "openai/gpt-5-nano" } oaiClient := 
openai.NewClient( option.WithBaseURL(baseUrl), option.WithAPIKey(apiKey), ) - CheckOpenAIWorksV2(oaiClient, llmProvider, logger) + CheckOpenAIWorksV2(oaiClient, baseUrl, modelSlug, logger) toolRegistry := initializeToolkitV2(db, projectService, cfg, logger) toolCallHandler := handler.NewToolCallHandlerV2(toolRegistry) diff --git a/internal/services/toolkit/client/completion_v2.go b/internal/services/toolkit/client/completion_v2.go index e7e5b7b2..a428edc6 100644 --- a/internal/services/toolkit/client/completion_v2.go +++ b/internal/services/toolkit/client/completion_v2.go @@ -65,7 +65,8 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream }() oaiClient := a.GetOpenAIClient(llmProvider) - params := getDefaultParamsV2(modelSlug, a.toolCallHandler.Registry) + var isCustomModel bool = llmProvider != nil && llmProvider.IsCustom() + params := getDefaultParamsV2(modelSlug, a.toolCallHandler.Registry, isCustomModel) for { params.Messages = openaiChatHistory diff --git a/internal/services/toolkit/client/utils_v2.go b/internal/services/toolkit/client/utils_v2.go index f48cbe3e..e1960ddb 100644 --- a/internal/services/toolkit/client/utils_v2.go +++ b/internal/services/toolkit/client/utils_v2.go @@ -6,13 +6,13 @@ This file contains utility functions for the client package. (Mainly miscellaneo It is used to append assistant responses to both OpenAI and in-app chat histories, and to create response items for chat interactions. 
*/ import ( + "path" "context" "fmt" "paperdebugger/internal/libs/cfg" "paperdebugger/internal/libs/db" "paperdebugger/internal/libs/logger" "paperdebugger/internal/services" - "paperdebugger/internal/models" "paperdebugger/internal/services/toolkit/registry" "paperdebugger/internal/services/toolkit/tools/xtramcp" chatv2 "paperdebugger/pkg/gen/api/chat/v2" @@ -53,7 +53,12 @@ func appendAssistantTextResponseV2(openaiChatHistory *OpenAIChatHistory, inappCh }) } -func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2) openaiv3.ChatCompletionNewParams { +func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2, isCustomModel bool) openaiv3.ChatCompletionNewParams { + // If custom model is used, strip prefix (e.g. "openai/gpt-4o" -> "gpt-4o") + if isCustomModel { + modelSlug = path.Base(modelSlug) + } + var reasoningModels = []string{ "gpt-5", "gpt-5-mini", @@ -88,14 +93,8 @@ func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2) } } -func CheckOpenAIWorksV2(oaiClient openaiv3.Client, llmProvider *models.LLMProviderConfig, logger *logger.Logger) { - logger.Info("[AI Client V2] checking if openai client works") - - var model = "openai/gpt-5-nano" - if llmProvider != nil && llmProvider.IsCustom() { - model = model[strings.LastIndex(model, "/")+1:] - } - +func CheckOpenAIWorksV2(oaiClient openaiv3.Client, baseUrl string, model string, logger *logger.Logger) { + logger.Info("[AI Client V2] checking if openai client works with " + baseUrl + "..") chatCompletion, err := oaiClient.Chat.Completions.New(context.TODO(), openaiv3.ChatCompletionNewParams{ Messages: []openaiv3.ChatCompletionMessageParamUnion{ openaiv3.UserMessage("Say 'openai client works'"), From 9564680945d6c777f9557664a5082244492778bb Mon Sep 17 00:00:00 2001 From: wjiayis Date: Sun, 25 Jan 2026 15:51:20 +0800 Subject: [PATCH 3/3] fix: use exact match instead of substring in getDefaultParamsV2 because e.g. gpt-5-mini could match gpt-5 ---
internal/services/toolkit/client/utils_v2.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/internal/services/toolkit/client/utils_v2.go b/internal/services/toolkit/client/utils_v2.go index e1960ddb..2fa04b94 100644 --- a/internal/services/toolkit/client/utils_v2.go +++ b/internal/services/toolkit/client/utils_v2.go @@ -16,7 +16,6 @@ import ( "paperdebugger/internal/services/toolkit/registry" "paperdebugger/internal/services/toolkit/tools/xtramcp" chatv2 "paperdebugger/pkg/gen/api/chat/v2" - "strings" "time" openaiv3 "github.com/openai/openai-go/v3" @@ -72,7 +71,7 @@ func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2, "codex-mini-latest", } for _, model := range reasoningModels { - if strings.Contains(modelSlug, model) { + if modelSlug == model { return openaiv3.ChatCompletionNewParams{ Model: modelSlug, MaxCompletionTokens: openaiv3.Int(4000),