From 26a9223ba208452b140af7ede13b61b92015b433 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Tue, 28 Oct 2025 16:56:04 +0000
Subject: [PATCH 1/2] changeset version bump

---
 .changeset/v3.29.3.md | 14 --------------
 CHANGELOG.md          |  6 ++++++
 src/package.json      |  2 +-
 3 files changed, 7 insertions(+), 15 deletions(-)
 delete mode 100644 .changeset/v3.29.3.md

diff --git a/.changeset/v3.29.3.md b/.changeset/v3.29.3.md
deleted file mode 100644
index 2a7cd9359d..0000000000
--- a/.changeset/v3.29.3.md
+++ /dev/null
@@ -1,14 +0,0 @@
----
-"roo-cline": patch
----
-
-- Update Gemini models with latest 09-2025 versions including Gemini 2.5 Pro and Flash (#8485 by @cleacos, PR by @roomote)
-- Add reasoning support for Z.ai GLM binary thinking mode (#8465 by @BeWater799, PR by @daniel-lxs)
-- Enable reasoning in Roo provider (thanks @mrubens!)
-- Add settings to configure time and cost display in system prompt (#8450 by @jaxnb, PR by @roomote)
-- Fix: Use max_output_tokens when available in LiteLLM fetcher (#8454 by @fabb, PR by @roomote)
-- Fix: Process queued messages after context condensing completes (#8477 by @JosXa, PR by @roomote)
-- Fix: Use monotonic clock for rate limiting to prevent timing issues (#7770 by @intermarkec, PR by @chrarnoldus)
-- Fix: Resolve checkpoint menu popover overflow (thanks @daniel-lxs!)
-- Fix: LiteLLM test failures after merge (thanks @daniel-lxs!)
-- Improve UX: Focus textbox and add newlines after adding to context (thanks @mrubens!)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e19703e597..b001b2bbb8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
 # Roo Code Changelog
 
+## 3.29.3
+
+### Patch Changes
+
+- - Update Gemini models with latest 09-2025 versions including Gemini 2.5 Pro and Flash (#8485 by @cleacos, PR by @roomote)
+
 ## [3.29.2] - 2025-10-27
 
 - Add support for LongCat-Flash-Thinking-FP8 models in Chutes AI provider (#8425 by @leakless21, PR by @roomote)
diff --git a/src/package.json b/src/package.json
index 4217f1b8ef..6f9d170c4d 100644
--- a/src/package.json
+++ b/src/package.json
@@ -3,7 +3,7 @@
 	"displayName": "%extension.displayName%",
 	"description": "%extension.description%",
 	"publisher": "RooVeterinaryInc",
-	"version": "3.29.2",
+	"version": "3.29.3",
 	"icon": "assets/icons/icon.png",
 	"galleryBanner": {
 		"color": "#617A91",

From 22cef07ce03df45d9c59136ac6dd6aab89d161ec Mon Sep 17 00:00:00 2001
From: Matt Rubens
Date: Tue, 28 Oct 2025 13:00:51 -0400
Subject: [PATCH 2/2] Revise CHANGELOG for version 3.29.3 updates

Updated changelog for version 3.29.3 with patch changes and fixes.

---
 CHANGELOG.md | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index b001b2bbb8..eabb4622fc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,10 +1,17 @@
 # Roo Code Changelog
 
-## 3.29.3
-
-### Patch Changes
-
-- - Update Gemini models with latest 09-2025 versions including Gemini 2.5 Pro and Flash (#8485 by @cleacos, PR by @roomote)
+## [3.29.3] - 2025-10-28
+
+- Update Gemini models with latest 09-2025 versions including Gemini 2.5 Pro and Flash (#8485 by @cleacos, PR by @roomote)
+- Add reasoning support for Z.ai GLM binary thinking mode (#8465 by @BeWater799, PR by @daniel-lxs)
+- Enable reasoning in Roo provider (thanks @mrubens!)
+- Add settings to configure time and cost display in system prompt (#8450 by @jaxnb, PR by @roomote)
+- Fix: Use max_output_tokens when available in LiteLLM fetcher (#8454 by @fabb, PR by @roomote)
+- Fix: Process queued messages after context condensing completes (#8477 by @JosXa, PR by @roomote)
+- Fix: Use monotonic clock for rate limiting to prevent timing issues (#7770 by @intermarkec, PR by @chrarnoldus)
+- Fix: Resolve checkpoint menu popover overflow (thanks @daniel-lxs!)
+- Fix: LiteLLM test failures after merge (thanks @daniel-lxs!)
+- Improve UX: Focus textbox and add newlines after adding to context (thanks @mrubens!)
 
 ## [3.29.2] - 2025-10-27