Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/api/providers/pearai/pearai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ export class PearAiHandler extends BaseProvider implements SingleCompletionHandl
apiModelId: underlyingModel,
})
} else {
// Use OpenAI fields here as we are using the same handler structure as the OpenAI Handler in PearAIGenericHandler
this.handler = new PearAIGenericHandler({
...options,
openAiBaseUrl: PEARAI_URL,
Expand Down
24 changes: 13 additions & 11 deletions src/api/providers/pearai/pearaiGeneric.ts
Original file line number Diff line number Diff line change
Expand Up @@ -221,28 +221,30 @@ export class PearAIGenericHandler extends BaseProvider implements SingleCompleti
}

override getModel(): { id: string; info: ModelInfo } {
const modelId = this.options.openAiModelId ?? "none"
const modelId = this.options.openAiModelId
// Prioritize serverside model info
if (this.options.apiModelId && this.options.pearaiAgentModels) {
if (modelId && this.options.pearaiAgentModels) {
let modelInfo = null
if (this.options.apiModelId.startsWith("pearai")) {
modelInfo = this.options.pearaiAgentModels.models[this.options.apiModelId].underlyingModelUpdated
if (modelId.startsWith("pearai")) {
modelInfo = this.options.pearaiAgentModels.models[modelId].underlyingModelUpdated
} else {
modelInfo = this.options.pearaiAgentModels.models[this.options.apiModelId || "pearai-model"]
modelInfo = this.options.pearaiAgentModels.models[modelId || "pearai-model"]
}
if (modelInfo) {
return {
id: this.options.apiModelId,
const result = {
id: modelId,
info: modelInfo,
}
return result
}
}
return {
id: modelId,
info: allModels[modelId],

const result = {
id: modelId ?? pearAiDefaultModelId,
info: allModels[modelId ?? pearAiDefaultModelId],
}
return result
}

async completePrompt(prompt: string): Promise<string> {
try {
const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
Expand Down
Loading