diff --git a/packages/types/src/provider-settings.ts b/packages/types/src/provider-settings.ts index 6d628ddfdff..730f2f6d577 100644 --- a/packages/types/src/provider-settings.ts +++ b/packages/types/src/provider-settings.ts @@ -190,6 +190,7 @@ const ollamaSchema = baseProviderSettingsSchema.extend({ ollamaModelId: z.string().optional(), ollamaBaseUrl: z.string().optional(), ollamaApiKey: z.string().optional(), + ollamaNumCtx: z.number().int().min(128).optional(), }) const vsCodeLmSchema = baseProviderSettingsSchema.extend({ diff --git a/src/api/providers/__tests__/native-ollama.spec.ts b/src/api/providers/__tests__/native-ollama.spec.ts index f8792937dbc..4ddeb909bb6 100644 --- a/src/api/providers/__tests__/native-ollama.spec.ts +++ b/src/api/providers/__tests__/native-ollama.spec.ts @@ -73,6 +73,61 @@ describe("NativeOllamaHandler", () => { expect(results[2]).toEqual({ type: "usage", inputTokens: 10, outputTokens: 2 }) }) + it("should not include num_ctx by default", async () => { + // Mock the chat response + mockChat.mockImplementation(async function* () { + yield { message: { content: "Response" } } + }) + + const stream = handler.createMessage("System", [{ role: "user" as const, content: "Test" }]) + + // Consume the stream + for await (const _ of stream) { + // consume stream + } + + // Verify that num_ctx was NOT included in the options + expect(mockChat).toHaveBeenCalledWith( + expect.objectContaining({ + options: expect.not.objectContaining({ + num_ctx: expect.anything(), + }), + }), + ) + }) + + it("should include num_ctx when explicitly set via ollamaNumCtx", async () => { + const options: ApiHandlerOptions = { + apiModelId: "llama2", + ollamaModelId: "llama2", + ollamaBaseUrl: "http://localhost:11434", + ollamaNumCtx: 8192, // Explicitly set num_ctx + } + + handler = new NativeOllamaHandler(options) + + // Mock the chat response + mockChat.mockImplementation(async function* () { + yield { message: { content: "Response" } } + }) + + const stream = 
handler.createMessage("System", [{ role: "user" as const, content: "Test" }]) + + // Consume the stream + for await (const _ of stream) { + // consume stream + } + + // Verify that num_ctx was included with the specified value + expect(mockChat).toHaveBeenCalledWith( + expect.objectContaining({ + options: expect.objectContaining({ + num_ctx: 8192, + }), + }), + ) + }) + it("should handle DeepSeek R1 models with reasoning detection", async () => { const options: ApiHandlerOptions = { apiModelId: "deepseek-r1", @@ -120,6 +175,49 @@ describe("NativeOllamaHandler", () => { }) expect(result).toBe("This is the response") }) + + it("should not include num_ctx in completePrompt by default", async () => { + mockChat.mockResolvedValue({ + message: { content: "Response" }, + }) + + await handler.completePrompt("Test prompt") + + // Verify that num_ctx was NOT included in the options + expect(mockChat).toHaveBeenCalledWith( + expect.objectContaining({ + options: expect.not.objectContaining({ + num_ctx: expect.anything(), + }), + }), + ) + }) + + it("should include num_ctx in completePrompt when explicitly set", async () => { + const options: ApiHandlerOptions = { + apiModelId: "llama2", + ollamaModelId: "llama2", + ollamaBaseUrl: "http://localhost:11434", + ollamaNumCtx: 4096, // Explicitly set num_ctx + } + + handler = new NativeOllamaHandler(options) + + mockChat.mockResolvedValue({ + message: { content: "Response" }, + }) + + await handler.completePrompt("Test prompt") + + // Verify that num_ctx was included with the specified value + expect(mockChat).toHaveBeenCalledWith( + expect.objectContaining({ + options: expect.objectContaining({ + num_ctx: 4096, + }), + }), + ) + }) }) describe("error handling", () => { diff --git a/src/api/providers/native-ollama.ts b/src/api/providers/native-ollama.ts index 80231540e8e..83a5c7b36ea 100644 --- a/src/api/providers/native-ollama.ts +++ b/src/api/providers/native-ollama.ts @@ -8,6 +8,11 @@ import { getOllamaModels } from 
"./fetchers/ollama" import { XmlMatcher } from "../../utils/xml-matcher" import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index" +interface OllamaChatOptions { + temperature: number + num_ctx?: number +} + function convertToOllamaMessages(anthropicMessages: Anthropic.Messages.MessageParam[]): Message[] { const ollamaMessages: Message[] = [] @@ -184,15 +189,22 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletio ) try { + // Build options object conditionally + const chatOptions: OllamaChatOptions = { + temperature: this.options.modelTemperature ?? (useR1Format ? DEEP_SEEK_DEFAULT_TEMPERATURE : 0), + } + + // Only include num_ctx if explicitly set via ollamaNumCtx + if (this.options.ollamaNumCtx !== undefined) { + chatOptions.num_ctx = this.options.ollamaNumCtx + } + // Create the actual API request promise const stream = await client.chat({ model: modelId, messages: ollamaMessages, stream: true, - options: { - num_ctx: modelInfo.contextWindow, - temperature: this.options.modelTemperature ?? (useR1Format ? DEEP_SEEK_DEFAULT_TEMPERATURE : 0), - }, + options: chatOptions, }) let totalInputTokens = 0 @@ -274,13 +286,21 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletio const { id: modelId } = await this.fetchModel() const useR1Format = modelId.toLowerCase().includes("deepseek-r1") + // Build options object conditionally + const chatOptions: OllamaChatOptions = { + temperature: this.options.modelTemperature ?? (useR1Format ? DEEP_SEEK_DEFAULT_TEMPERATURE : 0), + } + + // Only include num_ctx if explicitly set via ollamaNumCtx + if (this.options.ollamaNumCtx !== undefined) { + chatOptions.num_ctx = this.options.ollamaNumCtx + } + const response = await client.chat({ model: modelId, messages: [{ role: "user", content: prompt }], stream: false, - options: { - temperature: this.options.modelTemperature ?? (useR1Format ? 
DEEP_SEEK_DEFAULT_TEMPERATURE : 0), - }, + options: chatOptions, }) return response.message?.content || "" diff --git a/src/shared/api.ts b/src/shared/api.ts index 2a327595749..88c21060983 100644 --- a/src/shared/api.ts +++ b/src/shared/api.ts @@ -14,6 +14,12 @@ export type ApiHandlerOptions = Omit & { * Defaults to true; set to false to disable summaries. */ enableGpt5ReasoningSummary?: boolean + /** + * Optional override for Ollama's num_ctx parameter. + * When set, this value will be used in Ollama chat requests. + * When undefined, Ollama will use the model's default num_ctx from the Modelfile. + */ + ollamaNumCtx?: number } // RouterName diff --git a/webview-ui/src/components/settings/providers/Ollama.tsx b/webview-ui/src/components/settings/providers/Ollama.tsx index b3ff00ccdda..615d3be4098 100644 --- a/webview-ui/src/components/settings/providers/Ollama.tsx +++ b/webview-ui/src/components/settings/providers/Ollama.tsx @@ -130,6 +130,26 @@ export const Ollama = ({ apiConfiguration, setApiConfigurationField }: OllamaPro ))} )} + { + const value = e.target?.value + if (value === "") { + setApiConfigurationField("ollamaNumCtx", undefined) + } else { + const numValue = parseInt(value, 10) + if (!isNaN(numValue) && numValue >= 128) { + setApiConfigurationField("ollamaNumCtx", numValue) + } + } + }} + placeholder="e.g., 4096" + className="w-full"> + +
+ {t("settings:providers.ollama.numCtxHelp")} +
+
{t("settings:providers.ollama.description")} {t("settings:providers.ollama.warning")} diff --git a/webview-ui/src/components/ui/hooks/useSelectedModel.ts b/webview-ui/src/components/ui/hooks/useSelectedModel.ts index f8a005e86a1..3a24df2f854 100644 --- a/webview-ui/src/components/ui/hooks/useSelectedModel.ts +++ b/webview-ui/src/components/ui/hooks/useSelectedModel.ts @@ -263,9 +263,17 @@ function getSelectedModel({ case "ollama": { const id = apiConfiguration.ollamaModelId ?? "" const info = ollamaModels && ollamaModels[apiConfiguration.ollamaModelId!] + + const adjustedInfo = + info?.contextWindow && + apiConfiguration?.ollamaNumCtx && + apiConfiguration.ollamaNumCtx < info.contextWindow + ? { ...info, contextWindow: apiConfiguration.ollamaNumCtx } + : info + return { id, - info: info || undefined, + info: adjustedInfo || undefined, } } case "lmstudio": { diff --git a/webview-ui/src/i18n/locales/ca/settings.json b/webview-ui/src/i18n/locales/ca/settings.json index 4f3e4d99249..cddba8351e3 100644 --- a/webview-ui/src/i18n/locales/ca/settings.json +++ b/webview-ui/src/i18n/locales/ca/settings.json @@ -380,6 +380,8 @@ "modelId": "ID del model", "apiKey": "Clau API d'Ollama", "apiKeyHelp": "Clau API opcional per a instàncies d'Ollama autenticades o serveis al núvol. Deixa-ho buit per a instal·lacions locals.", + "numCtx": "Mida de la finestra de context (num_ctx)", + "numCtxHelp": "Sobreescriu la mida de la finestra de context per defecte del model. Deixeu-ho en blanc per utilitzar la configuració del Modelfile del model. El valor mínim és 128.", "description": "Ollama permet executar models localment al vostre ordinador. Per a instruccions sobre com començar, consulteu la Guia d'inici ràpid.", "warning": "Nota: Roo Code utilitza prompts complexos i funciona millor amb models Claude. Els models menys capaços poden no funcionar com s'espera." 
}, diff --git a/webview-ui/src/i18n/locales/de/settings.json b/webview-ui/src/i18n/locales/de/settings.json index 4e75f6af2a0..4d9ac3f33f9 100644 --- a/webview-ui/src/i18n/locales/de/settings.json +++ b/webview-ui/src/i18n/locales/de/settings.json @@ -380,6 +380,8 @@ "modelId": "Modell-ID", "apiKey": "Ollama API-Schlüssel", "apiKeyHelp": "Optionaler API-Schlüssel für authentifizierte Ollama-Instanzen oder Cloud-Services. Leer lassen für lokale Installationen.", + "numCtx": "Kontextfenstergröße (num_ctx)", + "numCtxHelp": "Überschreibt die Standard-Kontextfenstergröße des Modells. Lassen Sie das Feld leer, um die Modelfile-Konfiguration des Modells zu verwenden. Der Mindestwert ist 128.", "description": "Ollama ermöglicht es dir, Modelle lokal auf deinem Computer auszuführen. Eine Anleitung zum Einstieg findest du im Schnellstart-Guide.", "warning": "Hinweis: Roo Code verwendet komplexe Prompts und funktioniert am besten mit Claude-Modellen. Weniger leistungsfähige Modelle funktionieren möglicherweise nicht wie erwartet." }, diff --git a/webview-ui/src/i18n/locales/en/settings.json b/webview-ui/src/i18n/locales/en/settings.json index 1be824b37e3..93dc073a6ac 100644 --- a/webview-ui/src/i18n/locales/en/settings.json +++ b/webview-ui/src/i18n/locales/en/settings.json @@ -379,6 +379,8 @@ "modelId": "Model ID", "apiKey": "Ollama API Key", "apiKeyHelp": "Optional API key for authenticated Ollama instances or cloud services. Leave empty for local installations.", + "numCtx": "Context Window Size (num_ctx)", + "numCtxHelp": "Override the model's default context window size. Leave empty to use the model's Modelfile configuration. Minimum value is 128.", "description": "Ollama allows you to run models locally on your computer. For instructions on how to get started, see their quickstart guide.", "warning": "Note: Roo Code uses complex prompts and works best with Claude models. Less capable models may not work as expected." 
}, diff --git a/webview-ui/src/i18n/locales/es/settings.json b/webview-ui/src/i18n/locales/es/settings.json index deb2bc7a227..f608190fc49 100644 --- a/webview-ui/src/i18n/locales/es/settings.json +++ b/webview-ui/src/i18n/locales/es/settings.json @@ -380,6 +380,8 @@ "modelId": "ID del modelo", "apiKey": "Clave API de Ollama", "apiKeyHelp": "Clave API opcional para instancias de Ollama autenticadas o servicios en la nube. Deja vacío para instalaciones locales.", + "numCtx": "Tamaño de la ventana de contexto (num_ctx)", + "numCtxHelp": "Sobrescribe el tamaño de la ventana de contexto predeterminado del modelo. Déjelo vacío para usar la configuración del Modelfile del modelo. El valor mínimo es 128.", "description": "Ollama le permite ejecutar modelos localmente en su computadora. Para obtener instrucciones sobre cómo comenzar, consulte la guía de inicio rápido.", "warning": "Nota: Roo Code utiliza prompts complejos y funciona mejor con modelos Claude. Los modelos menos capaces pueden no funcionar como se espera." }, diff --git a/webview-ui/src/i18n/locales/fr/settings.json b/webview-ui/src/i18n/locales/fr/settings.json index ccb8e61d7a0..939e5e55946 100644 --- a/webview-ui/src/i18n/locales/fr/settings.json +++ b/webview-ui/src/i18n/locales/fr/settings.json @@ -380,6 +380,8 @@ "modelId": "ID du modèle", "apiKey": "Clé API Ollama", "apiKeyHelp": "Clé API optionnelle pour les instances Ollama authentifiées ou les services cloud. Laissez vide pour les installations locales.", + "numCtx": "Taille de la fenêtre de contexte (num_ctx)", + "numCtxHelp": "Remplace la taille de la fenêtre de contexte par défaut du modèle. Laissez vide pour utiliser la configuration du Modelfile du modèle. La valeur minimale est 128.", "description": "Ollama vous permet d'exécuter des modèles localement sur votre ordinateur. 
Pour obtenir des instructions sur la mise en route, consultez le guide de démarrage rapide.", "warning": "Remarque : Roo Code utilise des prompts complexes et fonctionne mieux avec les modèles Claude. Les modèles moins performants peuvent ne pas fonctionner comme prévu." }, diff --git a/webview-ui/src/i18n/locales/hi/settings.json b/webview-ui/src/i18n/locales/hi/settings.json index 3d879e2ca74..adb3e1cbd10 100644 --- a/webview-ui/src/i18n/locales/hi/settings.json +++ b/webview-ui/src/i18n/locales/hi/settings.json @@ -380,6 +380,8 @@ "modelId": "मॉडल ID", "apiKey": "Ollama API Key", "apiKeyHelp": "प्रमाणित Ollama इंस्टेंसेस या क्लाउड सेवाओं के लिए वैकल्पिक API key। स्थानीय इंस्टॉलेशन के लिए खाली छोड़ें।", + "numCtx": "संदर्भ विंडो आकार (num_ctx)", + "numCtxHelp": "मॉडल के डिफ़ॉल्ट संदर्भ विंडो आकार को ओवरराइड करें। मॉडल की मॉडलफ़ाइल कॉन्फ़िगरेशन का उपयोग करने के लिए खाली छोड़ दें। न्यूनतम मान 128 है।", "description": "Ollama आपको अपने कंप्यूटर पर स्थानीय रूप से मॉडल चलाने की अनुमति देता है। आरंभ करने के निर्देशों के लिए, उनकी क्विकस्टार्ट गाइड देखें।", "warning": "नोट: Roo Code जटिल प्रॉम्प्ट्स का उपयोग करता है और Claude मॉडल के साथ सबसे अच्छा काम करता है। कम क्षमता वाले मॉडल अपेक्षित रूप से काम नहीं कर सकते हैं।" }, diff --git a/webview-ui/src/i18n/locales/id/settings.json b/webview-ui/src/i18n/locales/id/settings.json index 8138726c335..97c9282741d 100644 --- a/webview-ui/src/i18n/locales/id/settings.json +++ b/webview-ui/src/i18n/locales/id/settings.json @@ -384,6 +384,8 @@ "modelId": "Model ID", "apiKey": "Ollama API Key", "apiKeyHelp": "API key opsional untuk instance Ollama yang terautentikasi atau layanan cloud. Biarkan kosong untuk instalasi lokal.", + "numCtx": "Ukuran Jendela Konteks (num_ctx)", + "numCtxHelp": "Ganti ukuran jendela konteks default model. Biarkan kosong untuk menggunakan konfigurasi Modelfile model. Nilai minimum adalah 128.", "description": "Ollama memungkinkan kamu menjalankan model secara lokal di komputer. 
Untuk instruksi cara memulai, lihat panduan quickstart mereka.", "warning": "Catatan: Roo Code menggunakan prompt kompleks dan bekerja terbaik dengan model Claude. Model yang kurang mampu mungkin tidak bekerja seperti yang diharapkan." }, diff --git a/webview-ui/src/i18n/locales/it/settings.json b/webview-ui/src/i18n/locales/it/settings.json index 80ff0f8a718..9c7bc314ce4 100644 --- a/webview-ui/src/i18n/locales/it/settings.json +++ b/webview-ui/src/i18n/locales/it/settings.json @@ -380,8 +380,10 @@ "modelId": "ID modello", "apiKey": "Chiave API Ollama", "apiKeyHelp": "Chiave API opzionale per istanze Ollama autenticate o servizi cloud. Lascia vuoto per installazioni locali.", + "numCtx": "Dimensione della finestra di contesto (num_ctx)", + "numCtxHelp": "Sovrascrive la dimensione predefinita della finestra di contesto del modello. Lasciare vuoto per utilizzare la configurazione del Modelfile del modello. Il valore minimo è 128.", "description": "Ollama ti permette di eseguire modelli localmente sul tuo computer. Per iniziare, consulta la guida rapida.", - "warning": "Nota: Roo Code utilizza prompt complessi e funziona meglio con i modelli Claude. I modelli con capacità inferiori potrebbero non funzionare come previsto." + "warning": "Nota: Roo Code utilizza prompt complessi e funziona meglio con i modelli Claude. I modelli con capacità inferiori potrebbero non funzionare come previsto." 
}, "unboundApiKey": "Chiave API Unbound", "getUnboundApiKey": "Ottieni chiave API Unbound", diff --git a/webview-ui/src/i18n/locales/ja/settings.json b/webview-ui/src/i18n/locales/ja/settings.json index 264d774473b..4535317fadc 100644 --- a/webview-ui/src/i18n/locales/ja/settings.json +++ b/webview-ui/src/i18n/locales/ja/settings.json @@ -380,6 +380,8 @@ "modelId": "モデルID", "apiKey": "Ollama APIキー", "apiKeyHelp": "認証されたOllamaインスタンスやクラウドサービス用のオプションAPIキー。ローカルインストールの場合は空のままにしてください。", + "numCtx": "コンテキストウィンドウサイズ (num_ctx)", + "numCtxHelp": "モデルのデフォルトのコンテキストウィンドウサイズを上書きします。モデルのModelfile構成を使用するには、空のままにします。最小値は128です。", "description": "Ollamaを使用すると、ローカルコンピューターでモデルを実行できます。始め方については、クイックスタートガイドをご覧ください。", "warning": "注意:Roo Codeは複雑なプロンプトを使用し、Claudeモデルで最適に動作します。能力の低いモデルは期待通りに動作しない場合があります。" }, diff --git a/webview-ui/src/i18n/locales/ko/settings.json b/webview-ui/src/i18n/locales/ko/settings.json index e490e31f78e..c3bc2922f28 100644 --- a/webview-ui/src/i18n/locales/ko/settings.json +++ b/webview-ui/src/i18n/locales/ko/settings.json @@ -380,6 +380,8 @@ "modelId": "모델 ID", "apiKey": "Ollama API 키", "apiKeyHelp": "인증된 Ollama 인스턴스나 클라우드 서비스용 선택적 API 키. 로컬 설치의 경우 비워두세요.", + "numCtx": "컨텍스트 창 크기(num_ctx)", + "numCtxHelp": "모델의 기본 컨텍스트 창 크기를 재정의합니다. 모델의 Modelfile 구성을 사용하려면 비워 둡니다. 최소값은 128입니다.", "description": "Ollama를 사용하면 컴퓨터에서 로컬로 모델을 실행할 수 있습니다. 시작하는 방법은 빠른 시작 가이드를 참조하세요.", "warning": "참고: Roo Code는 복잡한 프롬프트를 사용하며 Claude 모델에서 가장 잘 작동합니다. 덜 강력한 모델은 예상대로 작동하지 않을 수 있습니다." }, diff --git a/webview-ui/src/i18n/locales/nl/settings.json b/webview-ui/src/i18n/locales/nl/settings.json index ee0ba193e5c..913230e5f81 100644 --- a/webview-ui/src/i18n/locales/nl/settings.json +++ b/webview-ui/src/i18n/locales/nl/settings.json @@ -380,6 +380,8 @@ "modelId": "Model-ID", "apiKey": "Ollama API-sleutel", "apiKeyHelp": "Optionele API-sleutel voor geauthenticeerde Ollama-instanties of cloudservices. 
Laat leeg voor lokale installaties.", + "numCtx": "Contextvenstergrootte (num_ctx)", + "numCtxHelp": "Overschrijft de standaard contextvenstergrootte van het model. Laat leeg om de Modelfile-configuratie van het model te gebruiken. De minimumwaarde is 128.", "description": "Ollama laat je modellen lokaal op je computer draaien. Zie hun quickstart-gids voor instructies.", "warning": "Let op: Roo Code gebruikt complexe prompts en werkt het beste met Claude-modellen. Minder krachtige modellen werken mogelijk niet zoals verwacht." }, diff --git a/webview-ui/src/i18n/locales/pl/settings.json b/webview-ui/src/i18n/locales/pl/settings.json index 2d30547d9f4..b6fbd3def27 100644 --- a/webview-ui/src/i18n/locales/pl/settings.json +++ b/webview-ui/src/i18n/locales/pl/settings.json @@ -380,6 +380,8 @@ "modelId": "ID modelu", "apiKey": "Klucz API Ollama", "apiKeyHelp": "Opcjonalny klucz API dla uwierzytelnionych instancji Ollama lub usług chmurowych. Pozostaw puste dla instalacji lokalnych.", + "numCtx": "Rozmiar okna kontekstu (num_ctx)", + "numCtxHelp": "Zastępuje domyślny rozmiar okna kontekstu modelu. Pozostaw puste, aby użyć konfiguracji Modelfile modelu. Minimalna wartość to 128.", "description": "Ollama pozwala na lokalne uruchamianie modeli na twoim komputerze. Aby rozpocząć, zapoznaj się z przewodnikiem szybkiego startu.", "warning": "Uwaga: Roo Code używa złożonych podpowiedzi i działa najlepiej z modelami Claude. Modele o niższych możliwościach mogą nie działać zgodnie z oczekiwaniami." }, diff --git a/webview-ui/src/i18n/locales/pt-BR/settings.json b/webview-ui/src/i18n/locales/pt-BR/settings.json index 338ab9f6b1f..45c4077757a 100644 --- a/webview-ui/src/i18n/locales/pt-BR/settings.json +++ b/webview-ui/src/i18n/locales/pt-BR/settings.json @@ -380,6 +380,8 @@ "modelId": "ID do Modelo", "apiKey": "Chave API Ollama", "apiKeyHelp": "Chave API opcional para instâncias Ollama autenticadas ou serviços em nuvem. 
Deixe vazio para instalações locais.", + "numCtx": "Tamanho da janela de contexto (num_ctx)", + "numCtxHelp": "Substitui o tamanho da janela de contexto padrão do modelo. Deixe em branco para usar a configuração do Modelfile do modelo. O valor mínimo é 128.", "description": "O Ollama permite que você execute modelos localmente em seu computador. Para instruções sobre como começar, veja o guia de início rápido deles.", "warning": "Nota: O Roo Code usa prompts complexos e funciona melhor com modelos Claude. Modelos menos capazes podem não funcionar como esperado." }, diff --git a/webview-ui/src/i18n/locales/ru/settings.json b/webview-ui/src/i18n/locales/ru/settings.json index be494c571b0..686c2de90c7 100644 --- a/webview-ui/src/i18n/locales/ru/settings.json +++ b/webview-ui/src/i18n/locales/ru/settings.json @@ -380,6 +380,8 @@ "modelId": "ID модели", "apiKey": "API-ключ Ollama", "apiKeyHelp": "Опциональный API-ключ для аутентифицированных экземпляров Ollama или облачных сервисов. Оставьте пустым для локальных установок.", + "numCtx": "Размер контекстного окна (num_ctx)", + "numCtxHelp": "Переопределяет размер контекстного окна модели по умолчанию. Оставьте пустым, чтобы использовать конфигурацию Modelfile модели. Минимальное значение — 128.", "description": "Ollama позволяет запускать модели локально на вашем компьютере. Для начала ознакомьтесь с кратким руководством.", "warning": "Примечание: Roo Code использует сложные подсказки и лучше всего работает с моделями Claude. Менее мощные модели могут работать некорректно." }, diff --git a/webview-ui/src/i18n/locales/tr/settings.json b/webview-ui/src/i18n/locales/tr/settings.json index fe4508495ba..461b06de7cb 100644 --- a/webview-ui/src/i18n/locales/tr/settings.json +++ b/webview-ui/src/i18n/locales/tr/settings.json @@ -380,6 +380,8 @@ "modelId": "Model Kimliği", "apiKey": "Ollama API Anahtarı", "apiKeyHelp": "Kimlik doğrulamalı Ollama örnekleri veya bulut hizmetleri için isteğe bağlı API anahtarı. 
Yerel kurulumlar için boş bırakın.", + "numCtx": "Bağlam Penceresi Boyutu (num_ctx)", + "numCtxHelp": "Modelin varsayılan bağlam penceresi boyutunu geçersiz kılar. Modelin Modelfile yapılandırmasını kullanmak için boş bırakın. Minimum değer 128'dir.", "description": "Ollama, modelleri bilgisayarınızda yerel olarak çalıştırmanıza olanak tanır. Başlamak için hızlı başlangıç kılavuzlarına bakın.", "warning": "Not: Roo Code karmaşık istemler kullanır ve Claude modelleriyle en iyi şekilde çalışır. Daha az yetenekli modeller beklendiği gibi çalışmayabilir." }, diff --git a/webview-ui/src/i18n/locales/vi/settings.json b/webview-ui/src/i18n/locales/vi/settings.json index 0f03de47a51..f4a0f0bfb69 100644 --- a/webview-ui/src/i18n/locales/vi/settings.json +++ b/webview-ui/src/i18n/locales/vi/settings.json @@ -380,6 +380,8 @@ "modelId": "ID mô hình", "apiKey": "Khóa API Ollama", "apiKeyHelp": "Khóa API tùy chọn cho các phiên bản Ollama đã xác thực hoặc dịch vụ đám mây. Để trống cho cài đặt cục bộ.", + "numCtx": "Kích thước cửa sổ ngữ cảnh (num_ctx)", + "numCtxHelp": "Ghi đè kích thước cửa sổ ngữ cảnh mặc định của mô hình. Để trống để sử dụng cấu hình Modelfile của mô hình. Giá trị tối thiểu là 128.", "description": "Ollama cho phép bạn chạy các mô hình cục bộ trên máy tính của bạn. Để biết hướng dẫn về cách bắt đầu, xem hướng dẫn nhanh của họ.", "warning": "Lưu ý: Roo Code sử dụng các lời nhắc phức tạp và hoạt động tốt nhất với các mô hình Claude. Các mô hình kém mạnh hơn có thể không hoạt động như mong đợi." 
}, diff --git a/webview-ui/src/i18n/locales/zh-CN/settings.json b/webview-ui/src/i18n/locales/zh-CN/settings.json index 51db19562a4..02bac4055f9 100644 --- a/webview-ui/src/i18n/locales/zh-CN/settings.json +++ b/webview-ui/src/i18n/locales/zh-CN/settings.json @@ -380,6 +380,8 @@ "modelId": "模型 ID", "apiKey": "Ollama API 密钥", "apiKeyHelp": "用于已认证 Ollama 实例或云服务的可选 API 密钥。本地安装请留空。", + "numCtx": "上下文窗口大小 (num_ctx)", + "numCtxHelp": "覆盖模型的默认上下文窗口大小。留空以使用模型的 Modelfile 配置。最小值为 128。", "description": "Ollama 允许您在本地计算机上运行模型。有关如何开始使用的说明,请参阅其快速入门指南。", "warning": "注意:Roo Code 使用复杂的提示,与 Claude 模型配合最佳。功能较弱的模型可能无法按预期工作。" }, diff --git a/webview-ui/src/i18n/locales/zh-TW/settings.json b/webview-ui/src/i18n/locales/zh-TW/settings.json index 89d517f5b57..b5134ef91fa 100644 --- a/webview-ui/src/i18n/locales/zh-TW/settings.json +++ b/webview-ui/src/i18n/locales/zh-TW/settings.json @@ -380,6 +380,8 @@ "modelId": "模型 ID", "apiKey": "Ollama API 金鑰", "apiKeyHelp": "用於已認證 Ollama 執行個體或雲端服務的選用 API 金鑰。本機安裝請留空。", + "numCtx": "上下文視窗大小 (num_ctx)", + "numCtxHelp": "覆寫模型的預設上下文視窗大小。留空以使用模型的 Modelfile 設定。最小值為 128。", "description": "Ollama 允許您在本機電腦執行模型。請參閱快速入門指南。", "warning": "注意:Roo Code 使用複雜提示,與 Claude 模型搭配最佳。功能較弱的模型可能無法正常運作。" },