src/api/providers/pearai/pearai.ts (41 changes: 22 additions & 19 deletions)
@@ -14,7 +14,7 @@ import { PEARAI_URL } from "../../../shared/pearaiApi"
 interface PearAiModelsResponse {
 	models: {
 		[key: string]: {
-			underlyingModel?: string
+			underlyingModel?: { [key: string]: any }
 			[key: string]: any
 		}
 	}
@@ -70,7 +70,10 @@ export class PearAiHandler extends BaseProvider implements SingleCompletionHandl
 			}
 			const data = (await response.json()) as PearAiModelsResponse
 			this.pearAiModelsResponse = data
-			const underlyingModel = data.models[modelId]?.underlyingModelUpdated || "claude-3-5-sonnet-20241022"
+			const underlyingModel =
+				data.models[modelId]?.underlyingModelUpdated?.underlyingModel ||
+				data.models[modelId]?.underlyingModel ||
+				"claude-3-5-sonnet-20241022"
 			if (underlyingModel.startsWith("claude") || modelId.startsWith("anthropic/")) {
 				// Default to Claude
 				this.handler = new AnthropicHandler({
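
To make the resolution order easier to see, here is a minimal standalone sketch of the same fallback chain (the helper name, its signature, and the sample payload are illustrative, not part of this change): prefer the model id nested under `underlyingModelUpdated`, then the older top-level `underlyingModel` value, then the hard-coded Claude default.

```typescript
// Illustrative sketch only; mirrors the fallback chain added above.
type PearAiModelEntry = { [key: string]: any }

function resolveUnderlyingModel(models: Record<string, PearAiModelEntry>, modelId: string): string {
	return (
		models[modelId]?.underlyingModelUpdated?.underlyingModel || // preferred: id nested in the updated metadata
		models[modelId]?.underlyingModel || // legacy field, kept as a fallback
		"claude-3-5-sonnet-20241022" // default when the models response has neither
	)
}

// Hypothetical usage:
// resolveUnderlyingModel({ "pearai-model": { underlyingModelUpdated: { underlyingModel: "claude-3-5-sonnet-20241022" } } }, "pearai-model")
// returns "claude-3-5-sonnet-20241022"
```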
@@ -114,25 +117,25 @@ export class PearAiHandler extends BaseProvider implements SingleCompletionHandl
 	}

 	getModel(): { id: string; info: ModelInfo } {
-		if (
-			this.pearAiModelsResponse &&
-			this.options.apiModelId === "pearai-model" &&
-			this.pearAiModelsResponse.models
-		) {
-			const modelInfo = this.pearAiModelsResponse.models[this.options.apiModelId]
-			if (modelInfo) {
-				return {
-					id: this.options.apiModelId,
-					info: {
-						contextWindow: modelInfo.contextWindow || 4096, // provide default or actual value
-						supportsPromptCache: modelInfo.supportsPromptCaching || false, // provide default or actual value
-						...modelInfo,
-					},
-				}
-			}
-		}
-		const baseModel = this.handler.getModel()
-		return baseModel
+		if (this.options.apiModelId) {
+			let modelInfo = null
+			if (this.options.apiModelId === "pearai-model") {
+				modelInfo = this.pearAiModelsResponse?.models["pearai-model"].underlyingModelUpdated
+			} else if (this.pearAiModelsResponse) {
+				modelInfo = this.pearAiModelsResponse.models[this.options.apiModelId || "pearai-model"]
+			}
+			return {
+				id: this.options.apiModelId,
+				info: {
+					contextWindow: modelInfo.contextWindow || 4096, // provide default or actual value
+					supportsPromptCache: modelInfo.supportsPromptCaching || false, // provide default or actual value
+					...modelInfo,
+				},
+			}
+		} else {
+			const baseModel = this.handler.getModel()
+			return baseModel
+		}
 	}

 	async *createMessage(systemPrompt: string, messages: any[]): AsyncGenerator<any> {
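
The rewritten `getModel()` now has three paths: the special `pearai-model` id pulls its display metadata from `underlyingModelUpdated`, any other configured id is looked up directly in the cached models map, and with no configured id the call is delegated to the wrapped provider handler. A simplified sketch of that selection follows (the function and parameter names are illustrative; optional chaining is added here for brevity and is not in the diff):

```typescript
// Simplified, standalone sketch of the branching in getModel(); not the actual class method.
function pickModelInfo(
	apiModelId: string | undefined,
	response: { models: Record<string, any> } | undefined,
	delegate: () => { id: string; info: any },
): { id: string; info: any } {
	if (!apiModelId) {
		// No model configured: defer to the wrapped provider handler.
		return delegate()
	}
	const modelInfo =
		apiModelId === "pearai-model"
			? response?.models["pearai-model"]?.underlyingModelUpdated // metadata of the model backing pearai-model
			: response?.models[apiModelId] // direct lookup for any other id
	return {
		id: apiModelId,
		info: {
			contextWindow: modelInfo?.contextWindow || 4096, // default when metadata is missing
			supportsPromptCache: modelInfo?.supportsPromptCaching || false,
			...modelInfo, // remaining metadata copied through (contextWindow overridden when present)
		},
	}
}
```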
src/shared/api.ts (3 changes: 2 additions & 1 deletion)
@@ -159,8 +159,9 @@ export interface ModelInfo
 	description?: string
 	reasoningEffort?: "low" | "medium" | "high"
 	thinking?: boolean
 	underlyingModel?: string
+	underlyingModelUpdated?: Record<string, any>
 }

 // Anthropic
 // https://docs.anthropic.com/en/docs/about-claude/models
 export type AnthropicModelId = keyof typeof anthropicModels
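
Assuming the new field sits next to the existing ones on `ModelInfo` in `src/shared/api.ts`, a hypothetical entry might look like this (values are illustrative): `underlyingModel` keeps the plain model id string, while `underlyingModelUpdated` carries the full metadata of whichever model is currently backing `pearai-model`.

```typescript
// Illustrative only; the field names follow the diff, the values are made up.
const exampleEntry: Pick<ModelInfo, "underlyingModel" | "underlyingModelUpdated"> = {
	underlyingModel: "claude-3-5-sonnet-20241022",
	underlyingModelUpdated: {
		underlyingModel: "claude-3-5-sonnet-20241022",
		contextWindow: 200_000,
		supportsPromptCaching: true,
	},
}
```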
webview-ui/src/components/settings/ApiOptions.tsx (11 changes: 10 additions & 1 deletion)
@@ -1598,7 +1598,16 @@ export function normalizeApiConfiguration(

 	if (modelId && modelId in models) {
 		selectedModelId = modelId
-		selectedModelInfo = models[modelId]
+		if (modelId === "pearai-model" && models[modelId].underlyingModelUpdated) {
+			let modelInfo = models[modelId].underlyingModelUpdated
+			selectedModelInfo = {
+				contextWindow: modelInfo.contextWindow || 4096, // provide default or actual value
+				supportsPromptCache: modelInfo.supportsPromptCaching || false, // provide default or actual value
+				...modelInfo,
+			}
+		} else {
+			selectedModelInfo = models[modelId]
+		}
 	} else {
 		selectedModelId = defaultId
 		selectedModelInfo = models[defaultId]
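
On the webview side the effect is that, for `pearai-model`, the settings UI now derives `selectedModelInfo` from the underlying model's metadata rather than from the placeholder router entry. A hypothetical illustration of that branch (the `models` map and its values are invented; only the property names come from the diff):

```typescript
// Hypothetical models map as the webview might receive it; values are illustrative.
const models: Record<string, any> = {
	"pearai-model": {
		contextWindow: 4096, // placeholder entry for the PearAI router model
		supportsPromptCache: false,
		underlyingModelUpdated: { contextWindow: 200_000, supportsPromptCaching: true },
	},
}

// With modelId === "pearai-model", the new branch effectively builds:
const info = models["pearai-model"].underlyingModelUpdated
const selectedModelInfo = {
	contextWindow: info.contextWindow || 4096, // 200_000 in this example
	supportsPromptCache: info.supportsPromptCaching || false, // true in this example
	...info, // remaining metadata copied through
}
```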