From 47279431722838fa214b669133e98eeb98a20370 Mon Sep 17 00:00:00 2001
From: nang-dev
Date: Mon, 21 Apr 2025 16:00:59 -0400
Subject: [PATCH] Hotfix

---
 src/api/providers/pearai/pearai.ts        |  1 +
 src/api/providers/pearai/pearaiGeneric.ts | 24 +++++++++++++-----------
 2 files changed, 14 insertions(+), 11 deletions(-)

diff --git a/src/api/providers/pearai/pearai.ts b/src/api/providers/pearai/pearai.ts
index d180ac175f8..5ed332436e6 100644
--- a/src/api/providers/pearai/pearai.ts
+++ b/src/api/providers/pearai/pearai.ts
@@ -82,6 +82,7 @@ export class PearAiHandler extends BaseProvider implements SingleCompletionHandl
 				apiModelId: underlyingModel,
 			})
 		} else {
+			// Use OpenAI fields here as we are using the same handler structure as the OpenAI handler in PearAIGenericHandler
 			this.handler = new PearAIGenericHandler({
 				...options,
 				openAiBaseUrl: PEARAI_URL,
diff --git a/src/api/providers/pearai/pearaiGeneric.ts b/src/api/providers/pearai/pearaiGeneric.ts
index 96c401f9b71..15509de95b3 100644
--- a/src/api/providers/pearai/pearaiGeneric.ts
+++ b/src/api/providers/pearai/pearaiGeneric.ts
@@ -221,28 +221,30 @@ export class PearAIGenericHandler extends BaseProvider implements SingleCompleti
 	}
 
 	override getModel(): { id: string; info: ModelInfo } {
-		const modelId = this.options.openAiModelId ?? "none"
+		const modelId = this.options.openAiModelId
 		// Prioritize serverside model info
-		if (this.options.apiModelId && this.options.pearaiAgentModels) {
+		if (modelId && this.options.pearaiAgentModels) {
 			let modelInfo = null
-			if (this.options.apiModelId.startsWith("pearai")) {
-				modelInfo = this.options.pearaiAgentModels.models[this.options.apiModelId].underlyingModelUpdated
+			if (modelId.startsWith("pearai")) {
+				modelInfo = this.options.pearaiAgentModels.models[modelId].underlyingModelUpdated
 			} else {
-				modelInfo = this.options.pearaiAgentModels.models[this.options.apiModelId || "pearai-model"]
+				modelInfo = this.options.pearaiAgentModels.models[modelId || "pearai-model"]
 			}
 			if (modelInfo) {
-				return {
-					id: this.options.apiModelId,
+				const result = {
+					id: modelId,
 					info: modelInfo,
 				}
+				return result
 			}
 		}
-		return {
-			id: modelId,
-			info: allModels[modelId],
+
+		const result = {
+			id: modelId ?? pearAiDefaultModelId,
+			info: allModels[modelId ?? pearAiDefaultModelId],
 		}
+		return result
 	}
-
 	async completePrompt(prompt: string): Promise<string> {
 		try {
 			const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
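
Note for reviewers: the standalone sketch below illustrates the fallback behaviour that getModel() gains in this patch, namely using pearAiDefaultModelId instead of the literal "none" when openAiModelId is unset. It is a simplified stand-in, not code from the repository: the ModelInfo shape, the contents of allModels, and the value of pearAiDefaultModelId are assumptions made for the example; only the ?? fallback mirrors the change above.

// Standalone TypeScript sketch of the new fallback in getModel().
// ModelInfo, allModels and pearAiDefaultModelId are simplified assumptions
// for illustration; the real definitions live in the PearAI provider code.

type ModelInfo = { maxTokens: number; contextWindow: number }

const pearAiDefaultModelId = "pearai-model" // assumed default id for illustration
const allModels: Record<string, ModelInfo> = {
	"pearai-model": { maxTokens: 8192, contextWindow: 128000 }, // illustrative values
}

function resolveModel(openAiModelId?: string): { id: string; info: ModelInfo } {
	// Before the hotfix, a missing openAiModelId collapsed to the literal "none",
	// so the lookup allModels["none"] produced undefined model info.
	// After the hotfix, the default model id is used for both the id and the lookup.
	const id = openAiModelId ?? pearAiDefaultModelId
	return { id, info: allModels[id] }
}

console.log(resolveModel()) // -> { id: "pearai-model", info: { maxTokens: 8192, contextWindow: 128000 } }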