1 change: 1 addition & 0 deletions cli/src/constants/providers/labels.ts
@@ -46,6 +46,7 @@ export const PROVIDER_LABELS: Record<ProviderName, string> = {
	"human-relay": "Human Relay",
	"fake-ai": "Fake AI",
	ovhcloud: "OVHcloud AI Endpoints",
	oca: "Oracle Code Assist",
	inception: "Inception",
	synthetic: "Synthetic",
	"sap-ai-core": "SAP AI Core",
3 changes: 3 additions & 0 deletions cli/src/constants/providers/models.ts
@@ -66,6 +66,7 @@ export type RouterName =
	| "deepinfra"
	| "vercel-ai-gateway"
	| "ovhcloud"
	| "oca"

/**
 * ModelInfo interface - mirrors the one from packages/types/src/model.ts
@@ -132,6 +133,7 @@ export const PROVIDER_TO_ROUTER_NAME: Record<ProviderName, RouterName | null> =
	"io-intelligence": "io-intelligence",
	"vercel-ai-gateway": "vercel-ai-gateway",
	ovhcloud: "ovhcloud",
	oca: "oca",
	// Providers without dynamic model support
	anthropic: null,
	bedrock: null,
@@ -184,6 +186,7 @@ export const PROVIDER_MODEL_FIELD: Record<ProviderName, string | null> = {
	"io-intelligence": "ioIntelligenceModelId",
	"vercel-ai-gateway": "vercelAiGatewayModelId",
	ovhcloud: "ovhCloudAiEndpointsModelId",
	oca: "apiModelId",
	// Providers without dynamic model support
	anthropic: null,
	bedrock: null,
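Taken together, the CLI constant changes above wire the new provider through label lookup, router resolution, and model-field resolution. A minimal sketch of how a CLI caller would consume these entries (the relative import paths are assumptions based on the file names in this diff):

```ts
import { PROVIDER_LABELS } from "./constants/providers/labels"
import { PROVIDER_TO_ROUTER_NAME, PROVIDER_MODEL_FIELD } from "./constants/providers/models"

const provider = "oca" as const

console.log(PROVIDER_LABELS[provider]) // "Oracle Code Assist"
console.log(PROVIDER_TO_ROUTER_NAME[provider]) // "oca" (dynamic model listing goes through the oca fetcher)
console.log(PROVIDER_MODEL_FIELD[provider]) // "apiModelId" (the settings field that stores the selected model)
```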
1 change: 1 addition & 0 deletions cli/src/constants/providers/settings.ts
@@ -1086,6 +1086,7 @@ export const PROVIDER_DEFAULT_MODELS: Record<ProviderName, string> = {
	unbound: "gpt-4o",
	requesty: "gpt-4o",
	roo: "gpt-4o",
	oca: "",
	"vercel-ai-gateway": "gpt-4o",
	"virtual-quota-fallback": "gpt-4o",
	"human-relay": "human",
1 change: 1 addition & 0 deletions cli/src/constants/providers/validation.ts
@@ -43,6 +43,7 @@ export const PROVIDER_REQUIRED_FIELDS: Record<ProviderName, string[]> = {
	"fake-ai": ["apiModelId"],
	ovhcloud: ["ovhCloudAiEndpointsApiKey", "ovhCloudAiEndpointsModelId"],
	inception: ["inceptionLabsApiKey", "inceptionLabsModelId"],
	oca: [],
	synthetic: ["syntheticApiKey", "apiModelId"],
	"sap-ai-core": ["sapAiCoreServiceKey", "sapAiCoreResourceGroup", "sapAiCoreDeploymentId", "sapAiCoreModelId"],
	// Special cases handled separately in handleSpecialValidations
3 changes: 2 additions & 1 deletion package.json
@@ -76,7 +76,8 @@
			"brace-expansion": "^2.0.2",
			"form-data": ">=4.0.4",
			"bluebird": ">=3.7.2",
			"glob": ">=11.1.0"
			"glob": ">=11.1.0",
			"fsevents": "^2.3.3"
		}
	}
}
1 change: 1 addition & 0 deletions packages/types/src/model.ts
@@ -99,6 +99,7 @@ export const modelInfoSchema = z.object({
	cacheWritesPrice: z.number().optional(),
	cacheReadsPrice: z.number().optional(),
	description: z.string().optional(),
	banner: z.string().optional(),
	// Default effort value for models that support reasoning effort
	reasoningEffort: reasoningEffortExtendedSchema.optional(),
	minTokensPerCachePoint: z.number().optional(),
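The only schema change here is the optional `banner` string, which the OCA fetcher later in this diff populates from `model_info.banner`. A quick sketch of the field passing validation; it assumes `modelInfoSchema` is exported from the package root and that the other keys shown cover the schema's required fields:

```ts
import { modelInfoSchema } from "@roo-code/types"

// Hypothetical model entry; only `banner` is new in this PR.
const result = modelInfoSchema.safeParse({
	maxTokens: 8192,
	contextWindow: 128_000,
	supportsPromptCache: false,
	banner: "Preview model (availability may change)",
})

if (result.success) {
	console.log(result.data.banner) // "Preview model (availability may change)"
}
```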
7 changes: 7 additions & 0 deletions packages/types/src/provider-settings.ts
@@ -61,6 +61,7 @@ export const dynamicProviders = [
	"requesty",
	"unbound",
	"glama",
	"oca",
	"roo",
	"chutes",
	"nano-gpt", //kilocode_change
@@ -352,6 +353,8 @@ const openAiNativeSchema = apiModelIdProviderModelSchema.extend({
	openAiNativeServiceTier: serviceTierSchema.optional(),
})

const ocaSchema = baseProviderSettingsSchema.extend({})

const mistralSchema = apiModelIdProviderModelSchema.extend({
	mistralApiKey: z.string().optional(),
	mistralCodestralUrl: z.string().optional(),
@@ -567,6 +570,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
	humanRelaySchema.merge(z.object({ apiProvider: z.literal("human-relay") })),
	fakeAiSchema.merge(z.object({ apiProvider: z.literal("fake-ai") })),
	xaiSchema.merge(z.object({ apiProvider: z.literal("xai") })),
	ocaSchema.merge(z.object({ apiProvider: z.literal("oca") })),
	// kilocode_change start
	geminiCliSchema.merge(z.object({ apiProvider: z.literal("gemini-cli") })),
	kilocodeSchema.merge(z.object({ apiProvider: z.literal("kilocode") })),
@@ -626,6 +630,7 @@ export const providerSettingsSchema = z.object({
	...humanRelaySchema.shape,
	...fakeAiSchema.shape,
	...xaiSchema.shape,
	...ocaSchema.shape,
	...groqSchema.shape,
	...basetenSchema.shape,
	...huggingFaceSchema.shape,
@@ -739,6 +744,7 @@ export const modelIdKeysByProvider: Record<TypicalProvider, ModelIdKey> = {
	fireworks: "apiModelId",
	featherless: "apiModelId",
	"io-intelligence": "ioIntelligenceModelId",
	oca: "apiModelId",
	roo: "apiModelId",
	"vercel-ai-gateway": "vercelAiGatewayModelId",
	"virtual-quota-fallback": "apiModelId",
@@ -852,6 +858,7 @@ export const MODELS_BY_PROVIDER: Record<
		models: Object.keys(openAiNativeModels),
	},
	"qwen-code": { id: "qwen-code", label: "Qwen Code", models: Object.keys(qwenCodeModels) },
	oca: { id: "oca", label: "Oracle Code Assist", models: [] },
	roo: { id: "roo", label: "Roo Code Cloud", models: [] },
	sambanova: {
		id: "sambanova",
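Because `ocaSchema` extends the base provider settings without adding keys, an OCA profile only needs the `apiProvider` discriminator to land in the new union branch. A hedged sketch (the export path and the exact set of accepted keys are assumptions; `apiModelId` follows the `modelIdKeysByProvider` entry above):

```ts
import { providerSettingsSchemaDiscriminated } from "@roo-code/types"

const parsed = providerSettingsSchemaDiscriminated.safeParse({
	apiProvider: "oca",
	apiModelId: "example-oca-model", // illustrative id; oca stores its model id under apiModelId
})

if (parsed.success && parsed.data.apiProvider === "oca") {
	console.log("OCA settings accepted")
}
```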
1,075 changes: 319 additions & 756 deletions pnpm-lock.yaml

Large diffs are not rendered by default.

3 changes: 3 additions & 0 deletions src/api/index.ts
@@ -50,6 +50,7 @@ import {
	FeatherlessHandler,
	VercelAiGatewayHandler,
	DeepInfraHandler,
	OcaHandler,
	// MiniMaxHandler, // kilocode_change
	BasetenHandler,
} from "./providers"
@@ -241,6 +242,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
			return new FeatherlessHandler(options)
		case "vercel-ai-gateway":
			return new VercelAiGatewayHandler(options)
		case "oca":
			return new OcaHandler(options)
		case "minimax":
			return new MiniMaxAnthropicHandler(options) // kilocode_change: anthropic
		case "baseten":
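With the new `case "oca"`, any profile whose `apiProvider` is `oca` is dispatched to `OcaHandler`. A minimal sketch of that dispatch; the settings object is pared down for illustration and the import paths assume a caller inside `src/`:

```ts
import { buildApiHandler } from "./api"
import { OcaHandler } from "./api/providers"
import type { ProviderSettings } from "@roo-code/types"

const settings = { apiProvider: "oca", apiModelId: "example-oca-model" } as ProviderSettings

const handler = buildApiHandler(settings)
console.log(handler instanceof OcaHandler) // true
```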
5 changes: 5 additions & 0 deletions src/api/providers/fetchers/modelCache.ts
@@ -20,6 +20,7 @@ import { getOpenRouterModels } from "./openrouter"
import { getVercelAiGatewayModels } from "./vercel-ai-gateway"
import { getRequestyModels } from "./requesty"
import { getGlamaModels } from "./glama"
import { getOCAModels } from "./oca"
import { getUnboundModels } from "./unbound"
import { getLiteLLMModels } from "./litellm"
import { GetModelsOptions } from "../../../shared/api"
@@ -35,6 +36,7 @@ import { getSyntheticModels } from "./synthetic"
import { getSapAiCoreModels } from "./sap-ai-core"
// kilocode_change end

import { DEFAULT_OCA_BASE_URL } from "../oca/utils/constants"
import { getDeepInfraModels } from "./deepinfra"
import { getHuggingFaceModels } from "./huggingface"
import { getRooModels } from "./roo"
@@ -140,6 +142,9 @@ async function fetchModelsFromProvider(options: GetModelsOptions): Promise<Model
		case "huggingface":
			models = await getHuggingFaceModels()
			break
		case "oca":
			models = await getOCAModels(options.baseUrl ?? DEFAULT_OCA_BASE_URL, options.apiKey)
			break
		// kilocode_change start
		case "sap-ai-core":
			models = await getSapAiCoreModels(
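In the model cache, the `oca` branch forwards the configured base URL (falling back to `DEFAULT_OCA_BASE_URL`) and an optional access token to the fetcher. A sketch of the corresponding call; the `getModels` entry point and the option names are assumed to match the pattern used by the other dynamic providers:

```ts
import { getModels } from "./api/providers/fetchers/modelCache"

async function refreshOcaModels(accessToken?: string) {
	// No baseUrl given, so the oca case falls back to DEFAULT_OCA_BASE_URL.
	const models = await getModels({ provider: "oca", apiKey: accessToken })
	console.log(Object.keys(models)) // model ids reported by /v1/model/info
}

void refreshOcaModels(process.env.OCA_ACCESS_TOKEN)
```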
123 changes: 123 additions & 0 deletions src/api/providers/fetchers/oca.ts
@@ -0,0 +1,123 @@
import axios from "axios"

import { getOcaClientInfo } from "../oca/utils/getOcaClientInfo"
import type { ModelRecord } from "../../../shared/api"
import type { ModelInfo } from "@roo-code/types"

export function getAxiosSettings(): { adapter?: any } {
	return { adapter: "fetch" as any }
}

export interface HttpClient {
	get: (url: string, config?: any) => Promise<{ status: number; data: any }>
}

const defaultHttpClient: HttpClient = {
	get: (url, config) => axios.get(url, config),
}

export function resolveOcaModelInfoUrl(baseUrl: string): string {
	const url = new URL(baseUrl)
	url.pathname = `${url.pathname.replace(/\/+$/, "")}/v1/model/info`
	return url.toString()
}

export function buildOcaHeaders(accessToken?: string): Record<string, string> {
	const { client, clientVersion, clientIde, clientIdeVersion } = getOcaClientInfo()

	const headers: Record<string, string> = {
		"Content-Type": "application/json",
		client: client,
		"client-version": clientVersion,
		"client-ide": clientIde,
		"client-ide-version": clientIdeVersion,
	}
	if (accessToken) headers["Authorization"] = `Bearer ${accessToken}`
	return headers
}

const DEFAULT_TIMEOUT_MS = 5000

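// Prices from the endpoint appear to be quoted per token; scale them to the per-million-token units used by ModelInfo.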
function parsePrice(price: any): number | undefined {
	if (price !== undefined && price !== null) {
		return parseFloat(price) * 1_000_000
	}
	return undefined
}

export async function getOCAModels(
	baseUrl: string,
	accessToken?: string,
	httpClient: HttpClient = defaultHttpClient,
): Promise<ModelRecord> {
	if (!baseUrl || typeof baseUrl !== "string" || baseUrl.trim().length === 0) {
		return {}
	}

	const url = resolveOcaModelInfoUrl(baseUrl)
	const headers = buildOcaHeaders(accessToken)

	try {
		const response = await httpClient.get(url, {
			headers,
			timeout: DEFAULT_TIMEOUT_MS,
			...getAxiosSettings(),
		})

		const dataArray: any[] = Array.isArray(response?.data?.data) ? response.data.data : []

		const models: ModelRecord = {}

		for (const model of dataArray) {
			const modelId = model?.litellm_params?.model
			if (typeof modelId !== "string" || !modelId) continue

			const info = model?.model_info || {}

			const maxTokens =
				typeof model?.litellm_params?.max_tokens === "number" ? model.litellm_params.max_tokens : -1
			const contextWindow =
				typeof info?.context_window === "number" && info.context_window > 0 ? info.context_window : 0

			const baseInfo: ModelInfo = {
				maxTokens,
				contextWindow,
				supportsImages: !!info?.supports_vision,
				supportsPromptCache: !!info?.supports_caching,
				inputPrice: parsePrice(info?.input_price),
				outputPrice: parsePrice(info?.output_price),
				cacheWritesPrice: parsePrice(info?.caching_price),
				cacheReadsPrice: parsePrice(info?.cached_price),
				description: info?.description,
				banner: info?.banner,
			}

			models[modelId] = baseInfo
		}

		return models
	} catch (error: any) {
		console.error("Failed to fetch models", error)

		let userMsg: string
		const resp = error?.response
		const req = error?.request
		const status = resp?.status
		const statusText = resp?.statusText
		const headers = resp?.headers ?? {}

		if (resp) {
			userMsg = `Did you set up your OCA access through entitlements? OCA service returned ${status ?? "unknown"} ${statusText ?? "Unknown Status"}.`
		} else if (req) {
			userMsg =
				"Only environment-variable-based proxy settings are supported. PAC/WPAD files (e.g. http://wpad/wpad.dat) are not supported in kilocode. Remove any WPAD/PAC reference from your IDE proxy settings, restart the IDE, and try again. (Refer to the OCA Kilo troubleshooting guide.)"
		} else {
			userMsg = error?.message || "Error occurred while fetching OCA models."
			console.error(userMsg, error)
		}

		const opcRequestId = headers?.["opc-request-id"]
		const suffix = opcRequestId ? ` opc-request-id: ${opcRequestId}` : ""
		throw new Error(`Error refreshing OCA models. ${userMsg}${suffix}`)
	}
}
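Because `getOCAModels` accepts an injectable `HttpClient`, the response mapping can be exercised without a live endpoint. A usage sketch with a stubbed client; the URL, token, and payload values are illustrative only:

```ts
import { getOCAModels, type HttpClient } from "./src/api/providers/fetchers/oca"

const stubClient: HttpClient = {
	get: async () => ({
		status: 200,
		data: {
			data: [
				{
					litellm_params: { model: "oca/example-model", max_tokens: 8192 },
					model_info: {
						context_window: 128000,
						supports_vision: true,
						supports_caching: false,
						input_price: "0.000002", // per-token price; parsePrice scales it to USD per 1M tokens
						description: "Illustrative entry",
					},
				},
			],
		},
	}),
}

void (async () => {
	const models = await getOCAModels("https://oca.example.com", "fake-token", stubClient)
	console.log(models["oca/example-model"]?.contextWindow) // 128000
	console.log(models["oca/example-model"]?.inputPrice) // ~2 (USD per 1M tokens)
})()
```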
1 change: 1 addition & 0 deletions src/api/providers/index.ts
@@ -44,5 +44,6 @@ export { RooHandler } from "./roo"
export { FeatherlessHandler } from "./featherless"
export { VercelAiGatewayHandler } from "./vercel-ai-gateway"
export { DeepInfraHandler } from "./deepinfra"
export { OcaHandler } from "./oca-handler"
export { MiniMaxHandler } from "./minimax"
export { BasetenHandler } from "./baseten"