@@ -137,11 +137,8 @@ const ChannelTable = ({ Tab }: { Tab: React.ReactNode }) => {
| {item.name} |
-
- {t(providerData?.label as any)}
+
+ {parseI18nString(providerData.name, language)}
|
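Note: the hunk above replaces a translation-key lookup (`t(providerData?.label)`) with `parseI18nString(providerData.name, language)`, i.e. the provider name is now resolved per locale at render time. A minimal sketch of what such a helper could look like, assuming the name is stored either as a plain string or as a locale-keyed map (the exact shape in `@fastgpt/global` may differ):

```ts
// Hypothetical i18n value: a plain string or a per-locale map with an English fallback.
type I18nStringType = string | { en: string; [locale: string]: string | undefined };

// Resolve the string for the active UI language, falling back to English.
export function parseI18nString(value: I18nStringType, language: string): string {
  if (typeof value === 'string') return value;
  return value[language] || value.en || '';
}

// Usage sketch: label for the current language.
const providerLabel = parseI18nString({ en: 'OpenAI', 'zh-CN': 'OpenAI' }, 'zh-CN');
```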
diff --git a/projects/app/src/pageComponents/account/model/Log/index.tsx b/projects/app/src/pageComponents/account/model/Log/index.tsx
index fe72b63dd75a..75e2e6aab88a 100644
--- a/projects/app/src/pageComponents/account/model/Log/index.tsx
+++ b/projects/app/src/pageComponents/account/model/Log/index.tsx
@@ -49,7 +49,8 @@ type LogDetailType = Omit & {
response_body?: string;
};
const ChannelLog = ({ Tab }: { Tab: React.ReactNode }) => {
- const { t } = useTranslation();
+ const { t, i18n } = useTranslation();
+ const language = i18n.language;
const { userInfo } = useUserStore();
const isRoot = userInfo?.username === 'root';
@@ -103,7 +104,7 @@ const ChannelLog = ({ Tab }: { Tab: React.ReactNode }) => {
const modelList = useMemo(() => {
const res = systemModelList
.map((item) => {
- const provider = getModelProvider(item.provider);
+ const provider = getModelProvider(item.provider, language);
return {
order: provider.order,
diff --git a/projects/app/src/pageComponents/account/model/ModelConfigTable.tsx b/projects/app/src/pageComponents/account/model/ModelConfigTable.tsx
index 1ae132ab7e92..2716c17fc8df 100644
--- a/projects/app/src/pageComponents/account/model/ModelConfigTable.tsx
+++ b/projects/app/src/pageComponents/account/model/ModelConfigTable.tsx
@@ -19,7 +19,7 @@ import {
import { useTranslation } from 'next-i18next';
import React, { useCallback, useMemo, useRef, useState } from 'react';
import {
- ModelProviderList,
+ getModelProviders,
type ModelProviderIdType,
getModelProvider
} from '@fastgpt/global/core/ai/provider';
@@ -59,20 +59,21 @@ const MyModal = dynamic(() => import('@fastgpt/web/components/common/MyModal'));
const ModelEditModal = dynamic(() => import('./AddModelBox').then((mod) => mod.ModelEditModal));
const ModelTable = ({ Tab }: { Tab: React.ReactNode }) => {
- const { t } = useTranslation();
+ const { t, i18n } = useTranslation();
+ const language = i18n.language;
const { userInfo } = useUserStore();
const { defaultModels, feConfigs } = useSystemStore();
const isRoot = userInfo?.username === 'root';
const [provider, setProvider] = useState('');
- const providerList = useRef<{ label: any; value: ModelProviderIdType | '' }[]>([
+ const providerList = useRef<{ label: React.ReactNode; value: ModelProviderIdType | '' }[]>([
{ label: t('common:All'), value: '' },
- ...ModelProviderList.map((item) => ({
+ ...getModelProviders(language).map((item) => ({
label: (
- {t(item.name as any)}
+ {item.name}
),
value: item.id
@@ -216,7 +217,7 @@ const ModelTable = ({ Tab }: { Tab: React.ReactNode }) => {
})();
const formatList = list.map((item) => {
- const provider = getModelProvider(item.provider);
+ const provider = getModelProvider(item.provider, language);
return {
...item,
avatar: provider.avatar,
@@ -239,7 +240,7 @@ const ModelTable = ({ Tab }: { Tab: React.ReactNode }) => {
});
return filterList;
- }, [systemModelList, t, modelType, provider, search, showActive]);
+ }, [systemModelList, t, modelType, language, provider, search, showActive]);
const activeModelLength = useMemo(() => {
return modelList.filter((item) => item.isActive).length;
}, [modelList]);
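Note: these hunks thread the active `i18n.language` into `getModelProviders` / `getModelProvider` and add `language` to the `useMemo` dependency array so the list recomputes when the locale changes. A rough sketch of a language-aware provider lookup, assuming a registry whose names are locale maps (field names and registry contents here are illustrative, not the real `@fastgpt/global/core/ai/provider` data):

```ts
type LocaleString = string | Record<string, string>;

interface ModelProviderItem {
  id: string;
  name: LocaleString;
  avatar: string;
  order: number;
}

// Illustrative registry entry only.
const providerRegistry: ModelProviderItem[] = [
  { id: 'openai', name: { en: 'OpenAI', 'zh-CN': 'OpenAI' }, avatar: 'model/openai', order: 1 }
];

const pick = (v: LocaleString, lang: string) => (typeof v === 'string' ? v : v[lang] || v.en || '');

// All providers with names resolved for the given language.
export function getModelProviders(language: string) {
  return providerRegistry.map((p) => ({ ...p, name: pick(p.name, language) }));
}

// Single provider lookup with a safe fallback for unknown ids.
export function getModelProvider(id: string, language: string) {
  const found = providerRegistry.find((p) => p.id === id);
  return found
    ? { ...found, name: pick(found.name, language) }
    : { id, name: id, avatar: '', order: Number.MAX_SAFE_INTEGER };
}
```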
diff --git a/projects/app/src/pageComponents/account/model/ModelDashboard/index.tsx b/projects/app/src/pageComponents/account/model/ModelDashboard/index.tsx
index 88fb7610abe4..8a312620f6d4 100644
--- a/projects/app/src/pageComponents/account/model/ModelDashboard/index.tsx
+++ b/projects/app/src/pageComponents/account/model/ModelDashboard/index.tsx
@@ -57,7 +57,8 @@ const getDefaultDateRange = (): DateRangeType => {
};
const ModelDashboard = ({ Tab }: { Tab: React.ReactNode }) => {
- const { t } = useTranslation();
+ const { t, i18n } = useTranslation();
+ const language = i18n.language;
const theme = useTheme();
const { feConfigs } = useSystemStore();
@@ -113,7 +114,7 @@ const ModelDashboard = ({ Tab }: { Tab: React.ReactNode }) => {
const modelList = useMemo(() => {
const res = systemModelList
.map((item) => {
- const provider = getModelProvider(item.provider);
+ const provider = getModelProvider(item.provider, language);
return {
order: provider.order,
icon: provider.avatar,
diff --git a/projects/app/src/pageComponents/app/detail/Logs/LogChart.tsx b/projects/app/src/pageComponents/app/detail/Logs/LogChart.tsx
index a979d2fd78fd..0edad7986852 100644
--- a/projects/app/src/pageComponents/app/detail/Logs/LogChart.tsx
+++ b/projects/app/src/pageComponents/app/detail/Logs/LogChart.tsx
@@ -53,6 +53,7 @@ export type HeaderControlProps = {
setIsSelectAllSource: React.Dispatch>;
dateRange: DateRangeType;
setDateRange: (value: DateRangeType) => void;
+ px?: [number, number];
};
const chartBoxStyles = {
@@ -138,7 +139,8 @@ const LogChart = ({
setIsSelectAllSource,
dateRange,
setDateRange,
- showSourceSelector = true
+ showSourceSelector = true,
+ px = [4, 8]
}: HeaderControlProps) => {
const { t } = useTranslation();
@@ -334,6 +336,7 @@ const LogChart = ({
return (
-
+
@@ -765,7 +768,8 @@ const HeaderControl = ({
setIsSelectAllSource,
dateRange,
setDateRange,
- showSourceSelector = true
+ showSourceSelector = true,
+ px = [4, 8]
}: HeaderControlProps) => {
const { t } = useTranslation();
@@ -780,13 +784,7 @@ const HeaderControl = ({
console.log(showSourceSelector);
return (
-
+
{showSourceSelector && (
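Note: `px?: [number, number]` with a default of `[4, 8]` lets callers override the horizontal padding of the header controls; in Chakra UI an array value is responsive (first entry for the smallest breakpoint, second for the next one up). A small sketch of how such a prop is typically consumed, assuming a Chakra `Flex` wrapper:

```tsx
import { Flex } from '@chakra-ui/react';
import React from 'react';

type HeaderBarProps = {
  children?: React.ReactNode;
  // Responsive horizontal padding: [mobile, desktop]; default mirrors the diff above.
  px?: [number, number];
};

// Hypothetical wrapper showing how the tuple maps onto Chakra's responsive array syntax.
const HeaderBar = ({ children, px = [4, 8] }: HeaderBarProps) => (
  <Flex px={px} alignItems="center" gap={2}>
    {children}
  </Flex>
);

export default HeaderBar;
```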
diff --git a/projects/app/src/pageComponents/app/detail/Logs/LogKeysConfigPopover.tsx b/projects/app/src/pageComponents/app/detail/Logs/LogKeysConfigPopover.tsx
index e317fbbe1489..2f4faf4cd957 100644
--- a/projects/app/src/pageComponents/app/detail/Logs/LogKeysConfigPopover.tsx
+++ b/projects/app/src/pageComponents/app/detail/Logs/LogKeysConfigPopover.tsx
@@ -10,13 +10,14 @@ import DndDrag, { Draggable } from '@fastgpt/web/components/common/DndDrag';
import MyIcon from '@fastgpt/web/components/common/Icon';
import React from 'react';
import type { AppLogKeysType } from '@fastgpt/global/core/app/logs/type';
+import type { SetState } from 'ahooks/lib/createUseStorageState';
const LogKeysConfigPopover = ({
logKeysList,
setLogKeysList
}: {
logKeysList: AppLogKeysType[];
- setLogKeysList: (logKeysList: AppLogKeysType[] | undefined) => void;
+ setLogKeysList: (value: SetState<AppLogKeysType[]>) => void;
}) => {
const { t } = useTranslation();
return (
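Note: the prop type moves from a plain `(logKeysList: AppLogKeysType[] | undefined) => void` to ahooks' `SetState`, which (like React's `SetStateAction`) accepts either a value or an updater function, so the popover can be wired directly to the setter returned by a storage-state hook. A minimal sketch of the shape involved, assuming `SetState<S>` is `S | ((prevState?: S) => S)` as exported by `ahooks/lib/createUseStorageState`, and an illustrative `AppLogKeysType`:

```ts
// Assumed ahooks shape (check the installed version); illustrative log-key type.
type SetState<S> = S | ((prevState?: S) => S);
type AppLogKeysType = { key: string; enable: boolean };

type PopoverProps = {
  logKeysList: AppLogKeysType[];
  setLogKeysList: (value: SetState<AppLogKeysType[]>) => void;
};

// Both call styles compile against the same prop:
declare const props: PopoverProps;
props.setLogKeysList([{ key: 'source', enable: true }]); // plain value
props.setLogKeysList((prev = []) => prev.filter((k) => k.enable)); // functional update
```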
diff --git a/projects/app/src/pageComponents/app/detail/Logs/LogTable.tsx b/projects/app/src/pageComponents/app/detail/Logs/LogTable.tsx
index e81f07a9c5d1..a029e16dc484 100644
--- a/projects/app/src/pageComponents/app/detail/Logs/LogTable.tsx
+++ b/projects/app/src/pageComponents/app/detail/Logs/LogTable.tsx
@@ -61,7 +61,8 @@ const LogTable = ({
setIsSelectAllSource,
dateRange,
setDateRange,
- showSourceSelector = true
+ showSourceSelector = true,
+ px = [4, 8]
}: HeaderControlProps) => {
const { t } = useTranslation();
const { feConfigs } = useSystemStore();
@@ -340,7 +341,7 @@ const LogTable = ({
});
return (
-
+
{showSourceSelector && (
diff --git a/projects/app/src/pageComponents/app/detail/Logs/SyncLogKeysPopover.tsx b/projects/app/src/pageComponents/app/detail/Logs/SyncLogKeysPopover.tsx
index b16a85cf0483..cf0733ccf07f 100644
--- a/projects/app/src/pageComponents/app/detail/Logs/SyncLogKeysPopover.tsx
+++ b/projects/app/src/pageComponents/app/detail/Logs/SyncLogKeysPopover.tsx
@@ -10,6 +10,7 @@ import { useContextSelector } from 'use-context-selector';
import { AppContext } from '../context';
import type { AppLogKeysType } from '@fastgpt/global/core/app/logs/type';
import type { getLogKeysResponse } from '@/pages/api/core/app/logs/getLogKeys';
+import type { SetState } from 'ahooks/lib/createUseStorageState';
const SyncLogKeysPopover = ({
logKeys,
@@ -18,7 +19,7 @@ const SyncLogKeysPopover = ({
fetchLogKeys
}: {
logKeys: AppLogKeysType[];
- setLogKeys: (logKeys: AppLogKeysType[]) => void;
+ setLogKeys: (value: SetState<AppLogKeysType[]>) => void;
teamLogKeys: AppLogKeysType[];
fetchLogKeys: () => Promise;
}) => {
diff --git a/projects/app/src/pageComponents/app/detail/WorkflowComponents/context/index.tsx b/projects/app/src/pageComponents/app/detail/WorkflowComponents/context/index.tsx
index 7781eec74d5f..11cbc8a991d6 100644
--- a/projects/app/src/pageComponents/app/detail/WorkflowComponents/context/index.tsx
+++ b/projects/app/src/pageComponents/app/detail/WorkflowComponents/context/index.tsx
@@ -697,6 +697,7 @@ const WorkflowContextProvider = ({
.map((node) => {
const status = checkNodeRunStatus({
node,
+ nodesMap: new Map(runtimeNodes.map((item) => [item.nodeId, item])),
runtimeEdges: debugData?.runtimeEdges || []
});
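Note: `checkNodeRunStatus` now receives a `nodesMap` built from `runtimeNodes`, so edge checks can resolve source/target nodes in O(1) instead of scanning the array per edge. The hunk builds the Map inside the `.map()` callback (once per node); hoisting it above the loop would build it once. A sketch of the lookup pattern with simplified node/edge types (the real ones live in `@fastgpt/global`):

```ts
// Simplified stand-ins for the runtime workflow types (illustrative only).
type RuntimeNode = { nodeId: string; name: string };
type RuntimeEdge = { source: string; target: string; status: 'waiting' | 'active' | 'skipped' };

// Build the lookup once for the whole status pass.
function buildNodesMap(runtimeNodes: RuntimeNode[]): Map<string, RuntimeNode> {
  return new Map(runtimeNodes.map((item) => [item.nodeId, item]));
}

// The kind of check that benefits: find edges feeding a node and resolve their source
// nodes through the map instead of re-scanning the node array for every edge.
function sourceNodesOf(
  node: RuntimeNode,
  nodesMap: Map<string, RuntimeNode>,
  runtimeEdges: RuntimeEdge[]
): RuntimeNode[] {
  return runtimeEdges
    .filter((edge) => edge.target === node.nodeId)
    .map((edge) => nodesMap.get(edge.source))
    .filter((n): n is RuntimeNode => Boolean(n));
}
```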
diff --git a/projects/app/src/pageComponents/app/detail/WorkflowComponents/context/workflowEventContext.tsx b/projects/app/src/pageComponents/app/detail/WorkflowComponents/context/workflowEventContext.tsx
index 3c917a8eb253..1405de5c2f52 100644
--- a/projects/app/src/pageComponents/app/detail/WorkflowComponents/context/workflowEventContext.tsx
+++ b/projects/app/src/pageComponents/app/detail/WorkflowComponents/context/workflowEventContext.tsx
@@ -17,7 +17,7 @@ type WorkflowEventContextType = {
hoverEdgeId?: string;
setHoverEdgeId: React.Dispatch>;
workflowControlMode?: 'drag' | 'select';
- setWorkflowControlMode: (value?: SetState<'drag' | 'select'> | undefined) => void;
+ setWorkflowControlMode: (value: SetState<'drag' | 'select'>) => void;
menu: {
top: number;
left: number;
@@ -40,7 +40,7 @@ export const WorkflowEventContext = createContext({
throw new Error('Function not implemented.');
},
workflowControlMode: 'drag',
- setWorkflowControlMode: function (value?: SetState<'drag' | 'select'> | undefined): void {
+ setWorkflowControlMode: function (value: SetState<'drag' | 'select'>): void {
throw new Error('Function not implemented.');
},
menu: null,
@@ -96,7 +96,7 @@ const WorkflowEventContextProvider = ({ children }: { children: ReactNode }) =>
/* Version histories */
const [showHistoryModal, setShowHistoryModal] = useState(false);
- const contextValue = useMemo(
+ const contextValue = useMemo<WorkflowEventContextType>(
() => ({
mouseInCanvas,
reactFlowWrapper,
diff --git a/projects/app/src/pageComponents/app/evaluation/DetailModal.tsx b/projects/app/src/pageComponents/app/evaluation/DetailModal.tsx
index ccb91da7ef1d..0117c0f4c6c5 100644
--- a/projects/app/src/pageComponents/app/evaluation/DetailModal.tsx
+++ b/projects/app/src/pageComponents/app/evaluation/DetailModal.tsx
@@ -83,15 +83,16 @@ const EvaluationDetailModal = ({
onClose: () => void;
fetchEvalList: () => void;
}) => {
- const { t } = useTranslation();
+ const { t, i18n } = useTranslation();
+ const language = i18n.language;
const [selectedIndex, setSelectedIndex] = useState(0);
const [editing, setEditing] = useState(false);
const [pollingInterval, setPollingInterval] = useState(10000);
const { llmModelList } = useSystemStore();
const modelData = useMemo(
- () => getModelFromList(llmModelList, evalDetail.evalModel),
- [evalDetail.evalModel, llmModelList]
+ () => getModelFromList(llmModelList, evalDetail.evalModel, language),
+ [evalDetail.evalModel, llmModelList, language]
);
const {
diff --git a/projects/app/src/pageComponents/chat/ChatFavouriteApp/index.tsx b/projects/app/src/pageComponents/chat/ChatFavouriteApp/index.tsx
new file mode 100644
index 000000000000..fef3041ccdda
--- /dev/null
+++ b/projects/app/src/pageComponents/chat/ChatFavouriteApp/index.tsx
@@ -0,0 +1,298 @@
+import { getFavouriteApps } from '@/web/core/chat/api';
+import {
+ Box,
+ Button,
+ Flex,
+ Grid,
+ GridItem,
+ Input,
+ InputGroup,
+ InputLeftElement,
+ Tab,
+ TabIndicator,
+ TabList,
+ Tabs
+} from '@chakra-ui/react';
+import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
+import MyIcon from '@fastgpt/web/components/common/Icon';
+import { useTranslation } from 'react-i18next';
+import { useForm } from 'react-hook-form';
+import { useContextSelector } from 'use-context-selector';
+import { ChatSettingContext } from '@/web/core/chat/context/chatSettingContext';
+import { useMemo } from 'react';
+import Avatar from '@fastgpt/web/components/common/Avatar';
+import { ChatSettingTabOptionEnum, ChatSidebarPaneEnum } from '@/pageComponents/chat/constants';
+import MyPopover from '@fastgpt/web/components/common/MyPopover';
+import NextHead from '@/components/common/NextHead';
+import MyBox from '@fastgpt/web/components/common/MyBox';
+import ChatSliderMobileDrawer from '@/pageComponents/chat/slider/ChatSliderMobileDrawer';
+import { useSystem } from '@fastgpt/web/hooks/useSystem';
+import { ChatContext } from '@/web/core/chat/context/chatContext';
+import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
+
+const ChatFavouriteApp = () => {
+ const { isPc } = useSystem();
+ const { t } = useTranslation();
+
+ const onOpenSlider = useContextSelector(ChatContext, (v) => v.onOpenSlider);
+
+ const handlePaneChange = useContextSelector(ChatSettingContext, (v) => v.handlePaneChange);
+ const wideLogoUrl = useContextSelector(ChatSettingContext, (v) => v.chatSettings?.wideLogoUrl);
+ const homeTabTitle = useContextSelector(ChatSettingContext, (v) => v.chatSettings?.homeTabTitle);
+
+ const tags = useContextSelector(ChatSettingContext, (v) => v.chatSettings?.favouriteTags || []);
+ const tagCache = useMemo(() => {
+ return tags.reduce(
+ (acc, tag) => {
+ acc[tag.id] = tag;
+ return acc;
+ },
+ {} as Record
+ );
+ }, [tags]);
+ const tagOptions = useMemo(
+ () => [
+ { label: t('chat:setting.favourite.category_tab.all'), value: '' },
+ ...tags.map((tag) => ({
+ label: tag.name,
+ value: tag.id
+ }))
+ ],
+ [tags, t]
+ );
+
+ const { register, watch, setValue } = useForm<{ name: string; tag: string }>({
+ defaultValues: {
+ name: '',
+ tag: ''
+ }
+ });
+ const searchAppName = watch('name');
+ const selectedTag = watch('tag');
+
+ // load favourite apps filtered by the current search name and selected tag
+ const { loading: isSearching, data: favouriteApps = [] } = useRequest2(
+ async () => {
+ return await getFavouriteApps({
+ name: searchAppName,
+ tag: selectedTag
+ });
+ },
+ {
+ manual: false,
+ throttleWait: 500,
+ refreshDeps: [searchAppName, selectedTag]
+ }
+ );
+
+ const TagBox = ({ id }: { id: string }) => {
+ const tag = tagCache[id];
+
+ if (!tag) return null;
+
+ return (
+ e.stopPropagation()}
+ >
+ {tag.name}
+
+ );
+ };
+
+ return (
+
+
+
+ {!isPc && (
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ )}
+
+ {/* header */}
+
+ {/* tag tabs */}
+
+
+ {tagOptions.map((option) => (
+ setValue('tag', option.value)}
+ >
+ {option.label}
+
+ ))}
+
+
+
+
+ {/* search input */}
+ {isPc && (
+
+
+
+
+
+
+ )}
+
+
+ {/* list */}
+ {favouriteApps.length > 0 ? (
+
+ {favouriteApps.map((app) => (
+
+ handlePaneChange(ChatSidebarPaneEnum.RECENTLY_USED_APPS, app.appId)}
+ >
+
+
+
+ {app.name}
+
+
+ {app.intro}
+
+
+
+ {app.favouriteTags.slice(0, 3).map((id) => (
+
+ ))}
+
+ {app.favouriteTags.length > 3 && (
+ e.stopPropagation()}
+ >
+ +{app.favouriteTags.length - 3}
+
+ }
+ >
+ {() => (
+ e.stopPropagation()}
+ >
+ {app.favouriteTags.slice(3).map((id) => (
+
+ ))}
+
+ )}
+
+ )}
+
+
+
+ ))}
+
+ ) : (
+
+
+
+ }
+ onClick={() =>
+ handlePaneChange(
+ ChatSidebarPaneEnum.SETTING,
+ undefined,
+ ChatSettingTabOptionEnum.FAVOURITE_APPS
+ )
+ }
+ >
+ {t('chat:setting.favourite.goto_add')}
+
+
+ )}
+
+ );
+};
+
+export default ChatFavouriteApp;
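Note: `ChatFavouriteApp` resolves each favourite's tag ids against the team-level `favouriteTags` list; indexing the tags by id once with `reduce` keeps every `TagBox` lookup O(1), and each card shows at most three tags with a "+n" popover for the rest. A standalone sketch of that pattern, assuming a tag shape of `{ id, name }`:

```ts
type FavouriteTag = { id: string; name: string }; // assumed shape of chatSettings.favouriteTags

// Index tags by id so badges can resolve names without Array.find per render.
function buildTagCache(tags: FavouriteTag[]): Record<string, FavouriteTag> {
  return tags.reduce<Record<string, FavouriteTag>>((acc, tag) => {
    acc[tag.id] = tag;
    return acc;
  }, {});
}

// Show the first three known tags and summarize the remainder, as the card does.
function splitTags(tagIds: string[], cache: Record<string, FavouriteTag>) {
  const known = tagIds.filter((id) => cache[id]);
  return { visible: known.slice(0, 3), hiddenCount: Math.max(known.length - 3, 0) };
}
```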
diff --git a/projects/app/src/pageComponents/chat/ChatHeader.tsx b/projects/app/src/pageComponents/chat/ChatHeader.tsx
index 6f2996e36cfd..a16dc52e5e2b 100644
--- a/projects/app/src/pageComponents/chat/ChatHeader.tsx
+++ b/projects/app/src/pageComponents/chat/ChatHeader.tsx
@@ -31,7 +31,7 @@ import {
} from '@/pageComponents/chat/constants';
import { useChatStore } from '@/web/core/chat/context/useChatStore';
import { usePathname } from 'next/navigation';
-import type { ChatSettingSchema } from '@fastgpt/global/core/chat/setting/type';
+import type { ChatSettingReturnType } from '@fastgpt/global/core/chat/setting/type';
const ChatHeader = ({
history,
@@ -43,7 +43,7 @@ const ChatHeader = ({
chatSettings
}: {
pane: ChatSidebarPaneEnum;
- chatSettings: ChatSettingSchema | undefined;
+ chatSettings?: ChatSettingReturnType;
history: ChatItemType[];
showHistory?: boolean;
diff --git a/projects/app/src/pageComponents/chat/ChatSetting/AppTree.tsx b/projects/app/src/pageComponents/chat/ChatSetting/AppTree.tsx
new file mode 100644
index 000000000000..cd295c8e1f62
--- /dev/null
+++ b/projects/app/src/pageComponents/chat/ChatSetting/AppTree.tsx
@@ -0,0 +1,146 @@
+import type { getMyApps } from '@/web/core/app/api';
+import { Box, Checkbox, Flex } from '@chakra-ui/react';
+import MyIcon from '@fastgpt/web/components/common/Icon';
+import Avatar from '@fastgpt/web/components/common/Avatar';
+import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
+import { useMemo, useState, useCallback } from 'react';
+
+export type App = Awaited<ReturnType<typeof getMyApps>>[number];
+
+export const TreeItem = ({
+ app,
+ depth,
+ folder,
+ checked,
+ expanded,
+ onCheck,
+ onCollapse
+}: {
+ app: App;
+ depth: number;
+ folder: boolean;
+ checked: boolean;
+ expanded: boolean;
+ onCheck: (id: string) => void;
+ onCollapse: (id: string) => void;
+}) => {
+ return (
+ (folder ? onCollapse(app._id) : onCheck(app._id))}
+ >
+ {folder ? (
+ {
+ e.stopPropagation();
+ if (folder) onCollapse(app._id);
+ }}
+ >
+
+
+ ) : (
+ onCheck(app._id)} size="sm" />
+ )}
+
+
+
+
+ {app.name}
+
+
+
+ );
+};
+
+export const Tree = ({
+ apps,
+ checkedIds,
+ onCheck
+}: {
+ apps: App[];
+ checkedIds: string[];
+ onCheck: (id: string) => void;
+}) => {
+ const children = useMemo(() => {
+ const map = new Map();
+ apps.forEach((item) => {
+ const key = item.parentId ? String(item.parentId) : '__root__';
+ const list = map.get(key) || [];
+ list.push(item);
+ map.set(key, list);
+ });
+ return map;
+ }, [apps]);
+
+ const [expanded, setExpanded] = useState<Record<string, boolean>>({});
+ const handleExpand = useCallback((id: string) => {
+ setExpanded((prev) => ({ ...prev, [id]: !prev[id] }));
+ }, []);
+
+ const RenderNodes = useCallback(
+ ({ parent, depth }: { parent: string; depth: number }) => {
+ const list = children.get(parent) || [];
+ return (
+ <>
+ {list.map((node) => {
+ const nodeId = String(node._id);
+ const isExpanded = !!expanded[nodeId];
+ const folder = node.type === AppTypeEnum.folder;
+
+ return (
+
+
+
+ {folder && isExpanded && (
+
+
+
+ )}
+
+ );
+ })}
+ </>
+ );
+ },
+ [children, checkedIds, expanded, onCheck, handleExpand]
+ );
+
+ return ;
+};
+
+export default Tree;
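Note: `Tree` takes the flat `getMyApps` result (folders and apps in one list), groups it by `parentId`, and renders recursively, descending into a folder only when it is expanded. A compact sketch of the grouping and traversal, with a simplified `App` shape (the real one comes from the API typing above):

```ts
type AppNode = { _id: string; parentId?: string | null; name: string; type: 'folder' | 'app' };

const ROOT_KEY = '__root__';

// Bucket children under their parent id; top-level items go under a sentinel key.
function groupByParent(apps: AppNode[]): Map<string, AppNode[]> {
  const map = new Map<string, AppNode[]>();
  for (const item of apps) {
    const key = item.parentId ? String(item.parentId) : ROOT_KEY;
    map.set(key, [...(map.get(key) ?? []), item]);
  }
  return map;
}

// Depth-first walk honouring an "expanded" record, mirroring the conditional recursion.
function* walk(
  children: Map<string, AppNode[]>,
  expanded: Record<string, boolean>,
  parent = ROOT_KEY,
  depth = 0
): Generator<{ node: AppNode; depth: number }> {
  for (const node of children.get(parent) ?? []) {
    yield { node, depth };
    if (node.type === 'folder' && expanded[node._id]) {
      yield* walk(children, expanded, node._id, depth + 1);
    }
  }
}
```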
diff --git a/projects/app/src/pageComponents/chat/ChatSetting/DataDashboard.tsx b/projects/app/src/pageComponents/chat/ChatSetting/DataDashboard.tsx
index 4f70f74eb40f..d9e4ecca3258 100644
--- a/projects/app/src/pageComponents/chat/ChatSetting/DataDashboard.tsx
+++ b/projects/app/src/pageComponents/chat/ChatSetting/DataDashboard.tsx
@@ -28,18 +28,11 @@ const LogDetails = ({ Header }: Props) => {
} = useMultipleSelect(Object.values(ChatSourceEnum), true);
return (
-
+
void;
+ onRefresh: () => Promise;
+};
+
+const AddFavouriteAppModal = ({ onClose, onRefresh }: Props) => {
+ const { t } = useTranslation();
+
+ const { watch: watchSearchValue, setValue } = useForm<{ name: string }>({
+ defaultValues: {
+ name: ''
+ }
+ });
+ const searchAppNameValue = watchSearchValue('name');
+
+ const [parentId, setParentId] = useState('');
+ const { data: appData = { apps: [], paths: [] }, loading: isFetching } = useRequest2(
+ async () => {
+ const [apps, paths] = await Promise.all([
+ getMyApps({
+ parentId,
+ searchKey: searchAppNameValue,
+ type: [AppTypeEnum.folder, AppTypeEnum.simple, AppTypeEnum.workflow, AppTypeEnum.plugin]
+ }),
+ searchAppNameValue.trim()
+ ? Promise.resolve([])
+ : getAppFolderPath({ sourceId: parentId, type: 'current' })
+ ]);
+ return { apps, paths };
+ },
+ {
+ manual: false,
+ throttleWait: 500,
+ refreshDeps: [parentId, searchAppNameValue]
+ }
+ );
+ const availableApps = appData.apps;
+ const paths = appData.paths;
+
+ const [selectedApps, setSelectedApps] = useState<{ id: string; name: string; avatar: string }[]>(
+ []
+ );
+
+ useRequest2(getFavouriteApps, {
+ manual: false,
+ onSuccess(res) {
+ setSelectedApps(
+ res.map((item) => ({ id: item.appId, name: item.name, avatar: item.avatar }))
+ );
+ }
+ });
+
+ const handleCheck = useCallback((app: { id: string; name: string; avatar: string }) => {
+ setSelectedApps((prev) => {
+ const exists = prev.some((item) => item.id === app.id);
+ if (exists) {
+ return prev.filter((item) => item.id !== app.id);
+ }
+ return [{ id: app.id, name: app.name, avatar: app.avatar }, ...prev];
+ });
+ }, []);
+
+ const { run: updateFavourites, loading: isUpdating } = useRequest2(
+ async () => {
+ await updateFavouriteApps(
+ selectedApps.map((app, index) => ({ appId: app.id, order: index + 1 }))
+ );
+ },
+ {
+ manual: true,
+ onSuccess: async () => {
+ await onRefresh();
+ onClose();
+ }
+ }
+ );
+
+ return (
+
+
+
+
+
+
+
+ setValue('name', e.target.value)}
+ size="md"
+ />
+
+
+
+ {searchAppNameValue && (
+
+ {t('chat:search_results')}
+
+ )}
+ {!searchAppNameValue && paths.length === 0 && (
+
+ setParentId('')}
+ >
+ {t('common:root_folder')}
+
+
+
+ )}
+ {!searchAppNameValue && paths.length > 0 && (
+ ({ parentId: p.parentId, parentName: p.parentName }))}
+ FirstPathDom={t('common:root_folder')}
+ onClick={(e) => setParentId(e)}
+ />
+ )}
+
+
+
+ {availableApps.length === 0 && !isFetching && (
+
+ )}
+ {availableApps.map((item: App) => (
+
+ {
+ if (item.type === AppTypeEnum.folder) {
+ if (searchAppNameValue) setValue('name', '');
+ setParentId(String(item._id));
+ } else {
+ handleCheck({ id: item._id, name: item.name, avatar: item.avatar });
+ }
+ }}
+ >
+ e.stopPropagation()}>
+ {item.type !== AppTypeEnum.folder && (
+ app.id === item._id)}
+ onChange={() =>
+ handleCheck({ id: item._id, name: item.name, avatar: item.avatar })
+ }
+ colorScheme="blue"
+ size="sm"
+ />
+ )}
+
+
+
+
+
+
+ {item.name}
+
+
+ {item.type === AppTypeEnum.folder ? t('common:Folder') : ''}
+
+
+
+ {item.type === AppTypeEnum.folder && (
+
+
+
+ )}
+
+
+ ))}
+
+
+
+
+
+
+
+ {t('chat:setting.favourite.selected_list', {
+ num: selectedApps.length
+ })}
+
+
+
+ {selectedApps.length === 0 && !isFetching && (
+
+ )}
+
+ {selectedApps.map((app) => {
+ return (
+
+
+
+ {app.name}
+
+
+ handleCheck(app)}
+ />
+
+
+ );
+ })}
+
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+};
+
+export default React.memo(AddFavouriteAppModal);
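Note: `AddFavouriteAppModal` fetches the app list and the current folder path in parallel, and skips the path request while a search keyword is active (search results are flat, so breadcrumbs would not apply). A hedged sketch of that request shape, with declared stubs standing in for the real `getMyApps` / `getAppFolderPath` helpers:

```ts
type AppItem = { _id: string; name: string; avatar: string; type: string };
type PathItem = { parentId: string; parentName: string };

// Stubs standing in for the helpers in @/web/core/app/api (signatures assumed from usage above).
declare function getMyApps(params: {
  parentId: string;
  searchKey: string;
  type: string[];
}): Promise<AppItem[]>;
declare function getAppFolderPath(params: { sourceId: string; type: 'current' }): Promise<PathItem[]>;

// Fetch list + breadcrumbs together; while searching, the breadcrumb request is skipped.
async function fetchAppsAndPath(parentId: string, searchKey: string) {
  const [apps, paths] = await Promise.all([
    getMyApps({ parentId, searchKey, type: ['folder', 'simple', 'workflow', 'plugin'] }),
    searchKey.trim()
      ? Promise.resolve<PathItem[]>([])
      : getAppFolderPath({ sourceId: parentId, type: 'current' })
  ]);
  return { apps, paths };
}
```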
diff --git a/projects/app/src/pageComponents/chat/ChatSetting/FavouriteAppSetting/TagManageModal.tsx b/projects/app/src/pageComponents/chat/ChatSetting/FavouriteAppSetting/TagManageModal.tsx
new file mode 100644
index 000000000000..8b3ea8254764
--- /dev/null
+++ b/projects/app/src/pageComponents/chat/ChatSetting/FavouriteAppSetting/TagManageModal.tsx
@@ -0,0 +1,611 @@
+import { ChatSettingContext } from '@/web/core/chat/context/chatSettingContext';
+import { AddIcon } from '@chakra-ui/icons';
+import {
+ Box,
+ Button,
+ Checkbox,
+ Flex,
+ IconButton,
+ Input,
+ InputGroup,
+ InputLeftElement,
+ useDisclosure,
+ VStack
+} from '@chakra-ui/react';
+import MyModal from '@fastgpt/web/components/common/MyModal';
+import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
+import React, { useEffect, useRef, useState, useCallback, useMemo } from 'react';
+import { useTranslation } from 'react-i18next';
+import { useContextSelector } from 'use-context-selector';
+import MyIcon from '@fastgpt/web/components/common/Icon';
+import type { ChatFavouriteTagType } from '@fastgpt/global/core/chat/setting/type';
+import { getNanoid } from '@fastgpt/global/common/string/tools';
+import { getFavouriteApps, updateChatSetting, updateFavouriteAppTags } from '@/web/core/chat/api';
+import { useForm } from 'react-hook-form';
+import Avatar from '@fastgpt/web/components/common/Avatar';
+import type { ChatFavouriteApp } from '@fastgpt/global/core/chat/favouriteApp/type';
+import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
+import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
+import DndDrag, { Draggable } from '@fastgpt/web/components/common/DndDrag';
+
+type EditableTagItemProps = {
+ tag: ChatFavouriteTagType;
+ isEditing: boolean;
+ onStartEdit: () => void;
+ onCommit: (updated: ChatFavouriteTagType) => Promise | void;
+ onCancelNew: (tag: ChatFavouriteTagType) => void;
+ onExitEdit: (tag: ChatFavouriteTagType) => void;
+ onConfirmDelete: (tag: ChatFavouriteTagType) => void;
+ onSaveTagForApp: (tag: ChatFavouriteTagType) => void;
+ appCount?: number;
+};
+
+const EditableTagItem = React.memo(function EditableTagItem({
+ isEditing,
+ tag: initialTag,
+ onCommit,
+ onCancelNew,
+ onExitEdit,
+ onStartEdit,
+ onConfirmDelete,
+ onSaveTagForApp,
+ appCount
+}: EditableTagItemProps) {
+ const { t } = useTranslation();
+
+ const [tag, setTag] = useState(initialTag);
+ const [isSelfEditing, setIsSelfEditing] = useState(isEditing);
+ const inputRef = useRef<HTMLInputElement>(null);
+
+ const { ConfirmModal, openConfirm } = useConfirm({
+ type: 'delete',
+ content: t('chat:setting.favourite.categories_modal.delete_confirm', {
+ name: initialTag.name
+ })
+ });
+
+ const handleConfirmDelete = useCallback(() => {
+ openConfirm(() => {
+ onConfirmDelete(tag);
+ })();
+ }, [openConfirm, onConfirmDelete, tag]);
+
+ const handleFinishEdit = useCallback(async () => {
+ // Cancel the new tag, or restore the tag's original name
+ if (tag.name.trim() === '') {
+ if ((initialTag.name || '').trim() === '') {
+ onCancelNew(initialTag);
+ } else {
+ setTag(initialTag);
+ setIsSelfEditing(false);
+ }
+ onExitEdit(initialTag);
+ if (inputRef.current) inputRef.current.blur();
+ return;
+ }
+ setIsSelfEditing(false);
+ await onCommit(tag);
+
+ if (inputRef.current) inputRef.current.blur();
+ }, [tag, onCommit, onCancelNew, onExitEdit, initialTag]);
+
+ useEffect(() => {
+ setIsSelfEditing(isEditing);
+ }, [isEditing]);
+
+ useEffect(() => {
+ if (isSelfEditing) return;
+ // sync from props when not editing
+ setTag(initialTag);
+ }, [initialTag, isSelfEditing]);
+
+ useEffect(() => {
+ if (!inputRef.current || !isSelfEditing) return;
+ inputRef.current.focus();
+ }, [isSelfEditing]);
+
+ return (
+
+
+ {isSelfEditing ? (
+ {
+ const nextName = e.target.value;
+ setTag({ ...tag, name: nextName });
+ }}
+ onKeyDown={(e) => {
+ if (e.key.toLowerCase() !== 'enter') return;
+ handleFinishEdit();
+ }}
+ />
+ ) : (
+
+ {tag.name}
+
+ )}
+ ({appCount ?? 0})
+
+
+ {!isSelfEditing && (
+
+ }
+ onClick={() => onSaveTagForApp(tag)}
+ />
+
+ }
+ onClick={() => {
+ onStartEdit();
+ setIsSelfEditing(true);
+ }}
+ />
+
+ }
+ onClick={() => handleConfirmDelete()}
+ />
+
+ )}
+
+
+
+ );
+});
+
+const SaveTagForAppSubPanel = ({
+ tag,
+ onClose,
+ onRefresh
+}: {
+ tag: ChatFavouriteTagType;
+ onClose: () => void;
+ onRefresh: () => Promise;
+}) => {
+ const { t } = useTranslation();
+
+ const { register, watch } = useForm<{ name: string }>({
+ defaultValues: {
+ name: ''
+ }
+ });
+ const searchAppName = watch('name');
+ // search favourite apps for list rendering (only favourites, not all apps)
+ const { data: visibleFavourites = [], loading: isSearching } = useRequest2(
+ async () => {
+ return await getFavouriteApps({ name: searchAppName });
+ },
+ {
+ manual: false,
+ throttleWait: 500,
+ refreshDeps: [searchAppName]
+ }
+ );
+
+ // load all favourites for checked state and saving
+ const { data: favouriteApps = [] } = useRequest2(
+ async () => {
+ return await getFavouriteApps({ name: '' });
+ },
+ {
+ manual: false
+ }
+ );
+
+ const [localAllFavourites, setLocalAllFavourites] = useState<ChatFavouriteApp[]>([]);
+
+ useEffect(() => {
+ setLocalAllFavourites(favouriteApps);
+ }, [favouriteApps]);
+
+ const checkedAppIds = useMemo(() => {
+ return (localAllFavourites || [])
+ .filter((fav) => Array.isArray(fav.favouriteTags) && fav.favouriteTags.includes(tag.id))
+ .map((fav) => fav.appId);
+ }, [localAllFavourites, tag.id]);
+
+ const isAppChecked = useCallback(
+ (appId: string) => {
+ const f = (localAllFavourites || []).find((f) => f.appId === appId);
+ return Array.isArray(f?.favouriteTags) && f.favouriteTags.includes(tag.id);
+ },
+ [localAllFavourites, tag.id]
+ );
+
+ const toggleAppChecked = useCallback(
+ (appId: string) => {
+ setLocalAllFavourites((prev) =>
+ (prev || []).map((item) => {
+ if (item.appId !== appId) return item;
+ const tags: string[] = Array.isArray(item.favouriteTags) ? [...item.favouriteTags] : [];
+ const idx = tags.indexOf(tag.id);
+ if (idx >= 0) {
+ tags.splice(idx, 1);
+ } else {
+ tags.push(tag.id);
+ }
+ return { ...item, favouriteTags: tags };
+ })
+ );
+ },
+ [tag.id]
+ );
+
+ // save apps (update tags) via updateFavouriteApps
+ const { loading: isSaving, runAsync: saveApps } = useRequest2(
+ async () => {
+ await updateFavouriteAppTags(
+ localAllFavourites.map((item) => ({ id: item._id, tags: item.favouriteTags }))
+ );
+ },
+ {
+ manual: true,
+ onSuccess: async () => {
+ await onRefresh();
+ onClose();
+ }
+ }
+ );
+
+ return (
+
+
+
+
+ }
+ onClick={onClose}
+ />
+
+
+
+ {tag.name}
+
+ ({checkedAppIds.length})
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {visibleFavourites.length > 0 ? (
+
+ {visibleFavourites.map((fav: any) => (
+ toggleAppChecked(fav.appId)}
+ >
+ {
+ e.stopPropagation();
+ toggleAppChecked(fav.appId);
+ }}
+ size="sm"
+ />
+
+
+
+ {fav.name || fav.appId}
+
+
+
+ ))}
+
+ ) : (
+
+
+
+ )}
+
+ );
+};
+
+type Props = {
+ onClose: () => void;
+ onRefresh: () => Promise;
+};
+
+const TagManageModal = ({ onClose, onRefresh }: Props) => {
+ const { t } = useTranslation();
+
+ const refreshChatSetting = useContextSelector(ChatSettingContext, (v) => v.refreshChatSetting);
+
+ // get tags from db
+ const tags = useContextSelector(ChatSettingContext, (v) => v.chatSettings?.favouriteTags || []);
+ // local editable tags list
+ const [localTags, setLocalTags] = useState(tags);
+
+ // control the editable state
+ const [isEditing, setIsEditing] = useState<string[]>([]);
+
+ // update tags
+ const { loading: isUpdating, runAsync: updateTags } = useRequest2(
+ async (nextTags: ChatFavouriteTagType[]) => {
+ await updateChatSetting({ favouriteTags: nextTags });
+ },
+ {
+ manual: true,
+ onSuccess: async () => {
+ await refreshChatSetting();
+ // after successful update, exit all editing states
+ setIsEditing([]);
+ }
+ }
+ );
+
+ // handle click new tag button
+ const handleClickNewTag = () => {
+ const id = getNanoid(8);
+ const next = [{ id, name: '' }, ...localTags];
+ setLocalTags(next as ChatFavouriteTagType[]);
+ setIsEditing((prev) => [...prev, id]);
+ };
+
+ // handle commit updated tag to server
+ const handleCommitTag = useCallback(
+ async (updated: ChatFavouriteTagType) => {
+ // compute next tags deterministically and use it for both state and request
+ const next = localTags.map((c) => (c.id === updated.id ? updated : c));
+ setLocalTags(next);
+ setIsEditing((prev) => prev.filter((v) => v !== updated.id));
+ await updateTags(next);
+ },
+ [localTags, updateTags]
+ );
+
+ const handleCancelNewTag = useCallback((target: ChatFavouriteTagType) => {
+ setLocalTags((prev) => prev.filter((c) => c.id !== target.id));
+ setIsEditing((prev) => prev.filter((v) => v !== target.id));
+ }, []);
+
+ const handleExitEdit = useCallback((target: ChatFavouriteTagType) => {
+ setIsEditing((prev) => prev.filter((v) => v !== target.id));
+ }, []);
+ // delete tag
+ const { loading: isDeleting, runAsync: deleteTag } = useRequest2(
+ async (target: ChatFavouriteTagType) => {
+ const next = localTags.filter((c) => c.id !== target.id);
+ setLocalTags(next);
+ await updateTags(next);
+ },
+ {
+ manual: true
+ }
+ );
+
+ const {
+ isOpen: isSaveTagForAppSubPanelOpen,
+ onOpen: onOpenSaveTagForAppSubPanel,
+ onClose: onCloseSaveTagForAppSubPanel
+ } = useDisclosure();
+
+ const [currentSaveTagForApp, setCurrentSaveTagForApp] = useState(
+ null
+ );
+
+ const handleOpenSaveTagForAppSubPanel = useCallback(
+ (tag: ChatFavouriteTagType) => {
+ setCurrentSaveTagForApp(tag);
+ onOpenSaveTagForAppSubPanel();
+ },
+ [onOpenSaveTagForAppSubPanel]
+ );
+
+ const isLoading = isUpdating || isDeleting || isEditing.length > 0;
+
+ // counts
+ const { data: allFavourites = [] } = useRequest2(
+ async () => {
+ return await getFavouriteApps({ name: '' });
+ },
+ {
+ manual: false,
+ refreshDeps: [isSaveTagForAppSubPanelOpen]
+ }
+ );
+ const tagIdToCount = useMemo(() => {
+ const map = new Map();
+ (allFavourites || []).forEach((fav: any) => {
+ const tags: string[] = Array.isArray(fav?.favouriteTags) ? fav.favouriteTags : [];
+ tags.forEach((tid) => map.set(tid, (map.get(tid) || 0) + 1));
+ });
+ return map;
+ }, [allFavourites]);
+
+ return (
+ <>
+
+ {isSaveTagForAppSubPanelOpen ? (
+
+ ) : (
+
+
+
+
+
+
+ {t('chat:setting.favourite.categories_modal.title', {
+ num: localTags.length
+ })}
+
+
+
+ }
+ onClick={handleClickNewTag}
+ >
+ {t('common:new_create')}
+
+
+
+
+ {localTags.length > 0 ? (
+
+
+ dataList={localTags}
+ renderInnerPlaceholder={false}
+ onDragEndCb={(list) => {
+ setLocalTags(list);
+ updateTags(list);
+ }}
+ >
+ {({ provided }) => (
+
+ {localTags.map((tag, index) => (
+
+ {(provided, snapshot) => (
+
+
+
+
+
+
+ setIsEditing((prev) => [...prev, tag.id])}
+ onConfirmDelete={(c) => deleteTag(c)}
+ onSaveTagForApp={handleOpenSaveTagForAppSubPanel}
+ />
+
+
+
+ )}
+
+ ))}
+ {provided.placeholder}
+
+ )}
+
+
+ ) : (
+
+
+
+ )}
+
+ )}
+
+ </>
+ );
+};
+
+export default React.memo(TagManageModal);
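Note: `TagManageModal` shows how many favourite apps carry each tag by folding the favourites list into a `Map<tagId, count>`. A minimal sketch of that count, assuming each favourite exposes a `favouriteTags: string[]` field:

```ts
type Favourite = { appId: string; favouriteTags?: string[] }; // assumed favourite shape

// Count how many favourites reference each tag id.
function countTagUsage(favourites: Favourite[]): Map<string, number> {
  const counts = new Map<string, number>();
  for (const fav of favourites) {
    for (const tagId of fav.favouriteTags ?? []) {
      counts.set(tagId, (counts.get(tagId) ?? 0) + 1);
    }
  }
  return counts;
}

// Usage: the "(n)" badge rendered next to each tag name in the list.
const badge = (tagId: string, counts: Map<string, number>) => `(${counts.get(tagId) ?? 0})`;
```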
diff --git a/projects/app/src/pageComponents/chat/ChatSetting/FavouriteAppSetting/index.tsx b/projects/app/src/pageComponents/chat/ChatSetting/FavouriteAppSetting/index.tsx
new file mode 100644
index 000000000000..8daec2228a0f
--- /dev/null
+++ b/projects/app/src/pageComponents/chat/ChatSetting/FavouriteAppSetting/index.tsx
@@ -0,0 +1,377 @@
+import { ChatSettingContext } from '@/web/core/chat/context/chatSettingContext';
+import {
+ Button,
+ ButtonGroup,
+ Flex,
+ HStack,
+ IconButton,
+ Input,
+ InputGroup,
+ InputLeftElement,
+ Table,
+ TableContainer,
+ Tbody,
+ Td,
+ Th,
+ Thead,
+ Tr,
+ useDisclosure
+} from '@chakra-ui/react';
+import MySelect from '@fastgpt/web/components/common/MySelect';
+import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
+import { useRef, useState } from 'react';
+import { useForm } from 'react-hook-form';
+import { useTranslation } from 'react-i18next';
+import { useContextSelector } from 'use-context-selector';
+import MyIcon from '@fastgpt/web/components/common/Icon';
+import { AddIcon } from '@chakra-ui/icons';
+import { deleteFavouriteApp, getFavouriteApps, updateFavouriteAppOrder } from '@/web/core/chat/api';
+import DndDrag, { Draggable } from '@fastgpt/web/components/common/DndDrag';
+import Avatar from '@fastgpt/web/components/common/Avatar';
+import { Box, Wrap } from '@chakra-ui/react';
+import type { ChatFavouriteApp } from '@fastgpt/global/core/chat/favouriteApp/type';
+import MyBox from '@fastgpt/web/components/common/MyBox';
+import MyPopover from '@fastgpt/web/components/common/MyPopover';
+import type { ChatFavouriteTagType } from '@fastgpt/global/core/chat/setting/type';
+import dynamic from 'next/dynamic';
+import { useSystemStore } from '@/web/common/system/useSystemStore';
+import PopoverConfirm from '@fastgpt/web/components/common/MyPopover/PopoverConfirm';
+import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
+
+const TagManageModal = dynamic(
+ () => import('@/pageComponents/chat/ChatSetting/FavouriteAppSetting/TagManageModal')
+);
+const AddFavouriteAppModal = dynamic(
+ () => import('@/pageComponents/chat/ChatSetting/FavouriteAppSetting/AddFavouriteAppModal')
+);
+
+type Props = {
+ Header: React.FC<{ children?: React.ReactNode }>;
+};
+
+const FavouriteAppSetting = ({ Header }: Props) => {
+ const { t } = useTranslation();
+
+ // search apps input
+ const {
+ register,
+ setValue: setSearchValue,
+ watch: watchSearchValue
+ } = useForm<{ search: string; tag: string }>({
+ defaultValues: {
+ search: '',
+ tag: ''
+ }
+ });
+
+ const searchAppNameValue = watchSearchValue('search');
+
+ const searchAppTagValue = watchSearchValue('tag');
+ // apps' tags options
+ const tagOptions = useContextSelector(ChatSettingContext, (v) => {
+ const tags = v.chatSettings?.favouriteTags || [];
+ return [
+ { label: t('chat:setting.favourite.category_all'), value: '' },
+ ...tags.map((c) => ({ label: c.name, value: c.id }))
+ ];
+ });
+ // app's tags cache map
+ const tagMap = useContextSelector(ChatSettingContext, (v) =>
+ (v.chatSettings?.favouriteTags || []).reduce<Record<string, ChatFavouriteTagType>>(
+ (acc, tag) => {
+ acc[tag.id] = { ...tag };
+ return acc;
+ },
+ {}
+ )
+ );
+
+ const [localFavourites, setLocalFavourites] = useState<ChatFavouriteApp[]>([]);
+
+ // search favourite apps by apps' name and tag
+ const { loading: isSearching, runAsync: getApps } = useRequest2(
+ async () => {
+ const apps = await getFavouriteApps({
+ name: searchAppNameValue,
+ tag: searchAppTagValue
+ });
+
+ setLocalFavourites(apps);
+ },
+ {
+ manual: false,
+ throttleWait: 500,
+ refreshDeps: [searchAppNameValue, searchAppTagValue]
+ }
+ );
+
+ // update app order
+ const { runAsync: orderApp } = useRequest2(
+ async (list: ChatFavouriteApp[]) => {
+ await updateFavouriteAppOrder(
+ list.map((item, idx) => ({
+ id: item._id,
+ order: idx
+ }))
+ );
+ getApps();
+ },
+ { manual: true }
+ );
+
+ // delete app
+ const { runAsync: deleteApp } = useRequest2(
+ async (id: string) => {
+ await deleteFavouriteApp(id);
+ getApps();
+ },
+ { manual: true }
+ );
+
+ // open tag manage modal
+ const {
+ isOpen: isOpenTagManageModal,
+ onOpen: onOpenTagManageModal,
+ onClose: onCloseTagManageModal
+ } = useDisclosure();
+
+ // open add app modal
+ const {
+ isOpen: isOpenAddAppModal,
+ onOpen: onOpenAddAppModal,
+ onClose: onCloseAddAppModal
+ } = useDisclosure();
+
+ const TagBox = ({ id }: { id: string }) => {
+ const tag = tagMap[id];
+
+ if (!tag) return null;
+
+ return (
+ e.stopPropagation()}
+ >
+ {tag.name}
+
+ );
+ };
+
+ return (
+ <>
+
+
+
+
+
+
+
+ |
+ {t('chat:setting.favourite.table_column_name')} |
+ {t('chat:setting.favourite.table_column_intro')} |
+ {t('chat:setting.favourite.table_column_category')} |
+
+ {t('chat:setting.favourite.table_column_action')}
+ |
+
+
+
+
+ dataList={localFavourites}
+ renderInnerPlaceholder={false}
+ onDragEndCb={(list) => {
+ const next = list.map((item, idx) => ({ ...item, order: idx }));
+ setLocalFavourites(next);
+ orderApp(next);
+ }}
+ >
+ {({ provided }) => (
+
+ {localFavourites.map((row, index) => (
+
+ {(provided, snapshot) => (
+
+ {/* drag handle */}
+ |
+
+
+
+ |
+
+ {/* name */}
+
+
+
+ {row.name || ''}
+
+ |
+
+ {/* intro */}
+
+
+ {row.intro || ''}
+
+ |
+
+ {/* tags */}
+
+
+ {row.favouriteTags.slice(0, 3).map((id) => (
+
+ ))}
+
+ {row.favouriteTags.length > 3 && (
+ e.stopPropagation()}
+ >
+ +{row.favouriteTags.length - 3}
+
+ }
+ >
+ {() => (
+ e.stopPropagation()}
+ >
+ {row.favouriteTags.slice(3).map((id) => (
+
+ ))}
+
+ )}
+
+ )}
+
+ |
+
+ {/* action */}
+
+ {
+ setLocalFavourites((prev) => {
+ const next = prev.filter((_, i) => i !== index);
+ // reset order
+ const ordered = next.map((item, idx) => ({
+ ...item,
+ order: idx
+ }));
+ deleteApp(row._id);
+ return ordered;
+ });
+ }}
+ Trigger={
+ }
+ />
+ }
+ />
+ |
+
+ )}
+
+ ))}
+ {provided.placeholder}
+
+ )}
+
+
+ {localFavourites.length === 0 && }
+
+
+
+ {isOpenTagManageModal && (
+
+ )}
+
+ {isOpenAddAppModal && (
+
+ )}
+ </>
+ );
+};
+
+export default FavouriteAppSetting;
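Note: after a drag ends, `FavouriteAppSetting` reindexes the local list (`order: idx`), applies it to state optimistically, then persists the new order via `updateFavouriteAppOrder` and refetches. A sketch of that optimistic reorder step, with a declared stub for the API call:

```ts
type FavouriteRow = { _id: string; name: string; order: number };

// Stub for the real updateFavouriteAppOrder helper in @/web/core/chat/api.
declare function updateFavouriteAppOrder(list: { id: string; order: number }[]): Promise<void>;

// Reindex after a drag, apply locally first, then persist the new order.
async function persistOrder(
  dragged: FavouriteRow[],
  setLocal: (rows: FavouriteRow[]) => void
): Promise<void> {
  const next = dragged.map((item, idx) => ({ ...item, order: idx }));
  setLocal(next); // optimistic update so the table doesn't flicker back while saving
  await updateFavouriteAppOrder(next.map((item) => ({ id: item._id, order: item.order })));
}
```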
diff --git a/projects/app/src/pageComponents/chat/ChatSetting/HomepageSetting/AddQuickAppModal.tsx b/projects/app/src/pageComponents/chat/ChatSetting/HomepageSetting/AddQuickAppModal.tsx
new file mode 100644
index 000000000000..acd1ab104d4a
--- /dev/null
+++ b/projects/app/src/pageComponents/chat/ChatSetting/HomepageSetting/AddQuickAppModal.tsx
@@ -0,0 +1,400 @@
+import { getMyApps, getAppBasicInfoByIds } from '@/web/core/app/api';
+import { Box, Button, Grid, GridItem, HStack, VStack, Flex, Checkbox } from '@chakra-ui/react';
+import MyModal from '@fastgpt/web/components/common/MyModal';
+import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
+import { useCallback, useEffect, useMemo, useState } from 'react';
+import { useForm } from 'react-hook-form';
+import { useTranslation } from 'react-i18next';
+import type { App } from '@/pageComponents/chat/ChatSetting/AppTree';
+import MyIcon from '@fastgpt/web/components/common/Icon';
+import type { QuickAppType } from '@fastgpt/global/core/chat/setting/type';
+import Avatar from '@fastgpt/web/components/common/Avatar';
+import DndDrag, { Draggable } from '@fastgpt/web/components/common/DndDrag';
+import SearchInput from '@fastgpt/web/components/common/Input/SearchInput';
+import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
+import FolderPath from '@/components/common/folder/Path';
+import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
+import { getAppFolderPath } from '@/web/core/app/api/app';
+import { ChevronRightIcon } from '@chakra-ui/icons';
+
+type Props = {
+ selectedIds: string[];
+ onClose: () => void;
+ onConfirm: (list: QuickAppType[]) => void;
+};
+
+const AddQuickAppModal = ({ selectedIds, onClose, onConfirm }: Props) => {
+ const { t } = useTranslation();
+
+ const [localSelectedIds, setLocalSelectedIds] = useState(selectedIds);
+
+ const [selectedInfo, setSelectedInfo] = useState<Record<string, QuickAppType>>({});
+
+ const { watch, setValue } = useForm<{ name: string }>({
+ defaultValues: {
+ name: ''
+ }
+ });
+ const searchAppName = watch('name');
+
+ const [parentId, setParentId] = useState('');
+
+ const {
+ data: appData = { apps: [], paths: [] as { parentId: string; parentName: string }[] },
+ loading: isFetching
+ } = useRequest2(
+ async () => {
+ const [apps, paths] = await Promise.all([
+ getMyApps({
+ parentId,
+ searchKey: searchAppName,
+ type: [AppTypeEnum.folder, AppTypeEnum.simple, AppTypeEnum.workflow]
+ }),
+ searchAppName.trim()
+ ? Promise.resolve([])
+ : getAppFolderPath({ sourceId: parentId, type: 'current' })
+ ]);
+ return { apps, paths };
+ },
+ {
+ manual: false,
+ throttleWait: 500,
+ refreshDeps: [parentId, searchAppName]
+ }
+ );
+ const availableApps = appData.apps;
+ const paths = appData.paths;
+
+ const availableAppsMap = useMemo(() => {
+ const map = new Map();
+ availableApps.forEach((app) => map.set(app._id, app));
+ return map;
+ }, [availableApps]);
+
+ const handleCheck = useCallback(
+ (id: string) => {
+ setLocalSelectedIds((prev) => {
+ const exists = prev.includes(id);
+ if (exists) {
+ // remove id and its cached info
+ setSelectedInfo((old) => {
+ const next: Record<string, QuickAppType> = { ...old };
+ delete next[id];
+ return next;
+ });
+ return prev.filter((v) => v !== id);
+ }
+ if (prev.length >= 4) return prev;
+ // add id and cache its info if available from current list
+ const app = availableAppsMap.get(id);
+ if (app) {
+ setSelectedInfo((old) => ({
+ ...old,
+ [id]: { _id: id, name: app.name, avatar: app.avatar }
+ }));
+ }
+ return [...prev, id];
+ });
+ },
+ [availableAppsMap]
+ );
+
+ const checkedQuickApps = useMemo(() => {
+ return localSelectedIds
+ .map((id) => {
+ const cached = selectedInfo[id];
+ if (cached) return cached;
+
+ const app = availableAppsMap.get(id);
+ if (app) return { _id: app._id, name: app.name, avatar: app.avatar };
+ })
+ .filter(Boolean) as QuickAppType[];
+ }, [localSelectedIds, selectedInfo, availableAppsMap]);
+
+ useEffect(() => {
+ const missing = localSelectedIds.filter((id) => !selectedInfo[id]);
+ if (missing.length === 0) return;
+ getAppBasicInfoByIds(missing)
+ .then((list) => {
+ setSelectedInfo((old) => {
+ const next: Record<string, QuickAppType> = { ...old };
+ list.forEach((item) => {
+ next[item.id] = { _id: item.id, name: item.name, avatar: item.avatar };
+ });
+ return next;
+ });
+ })
+ .catch(() => {});
+ }, [localSelectedIds, selectedInfo]);
+
+ const { loading: isUpdating, runAsync: confirmSelect } = useRequest2(
+ async () => {
+ onConfirm(checkedQuickApps);
+ },
+ {
+ refreshDeps: [checkedQuickApps],
+ manual: true,
+ onSuccess: onClose
+ }
+ );
+
+ return (
+
+
+
+
+
+
+
+ {
+ const v = e.target.value;
+ setValue('name', v);
+ }}
+ size="md"
+ />
+
+
+
+ {searchAppName && (
+
+ {t('chat:search_results')}
+
+ )}
+ {!searchAppName && paths.length === 0 && (
+
+ setParentId('')}
+ >
+ {t('common:root_folder')}
+
+
+
+ )}
+ {!searchAppName && paths.length > 0 && (
+ ({ parentId: p.parentId, parentName: p.parentName }))}
+ FirstPathDom={t('common:root_folder')}
+ onClick={(e) => setParentId(e)}
+ />
+ )}
+
+
+
+ {availableApps.length === 0 && !isFetching && (
+
+ )}
+ {availableApps.map((item: App) => (
+
+ {
+ if (item.type === AppTypeEnum.folder) {
+ if (searchAppName) {
+ setValue('name', '');
+ }
+ setParentId(String(item._id));
+ } else {
+ handleCheck(String(item._id));
+ }
+ }}
+ >
+ e.stopPropagation()}>
+ {item.type !== AppTypeEnum.folder && (
+ handleCheck(String(item._id))}
+ colorScheme="blue"
+ size="sm"
+ />
+ )}
+
+
+
+
+
+
+ {item.name}
+
+
+ {item.type === AppTypeEnum.folder ? t('common:Folder') : ''}
+
+
+
+ {item.type === AppTypeEnum.folder && (
+
+
+
+ )}
+
+
+ ))}
+
+
+
+
+
+
+
+ {t('chat:setting.favourite.selected_list', {
+ num: `${checkedQuickApps.length} / 4`
+ })}
+
+
+
+ {checkedQuickApps.length === 0 && !isFetching && (
+
+ )}
+
+ dataList={checkedQuickApps}
+ renderInnerPlaceholder={false}
+ onDragEndCb={(list) => {
+ const newOrderIds = list.map((item) => item._id);
+ setLocalSelectedIds(newOrderIds);
+ }}
+ >
+ {({ provided }) => (
+
+ {checkedQuickApps.map((q, index) => {
+ const app = selectedInfo[q._id] || {
+ _id: q._id,
+ name: q.name,
+ avatar: q.avatar
+ };
+ return (
+
+ {(provided, snapshot) => (
+
+
+
+
+
+
+
+ {app.name}
+
+
+
+ handleCheck(q._id)}
+ />
+
+
+ )}
+
+ );
+ })}
+ {provided.placeholder}
+
+ )}
+
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+};
+
+export default AddQuickAppModal;
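Note: quick-app selection is capped at four entries, and the modal caches `{ _id, name, avatar }` for every checked id so items picked in another folder (or via search) still render after the visible list changes; ids with missing info are backfilled through `getAppBasicInfoByIds`. A sketch of the toggle-with-cap logic:

```ts
type QuickApp = { _id: string; name: string; avatar: string };

// Toggle an id in the selection, enforcing a maximum of four picks (as in the modal above).
function toggleSelection(
  prev: string[],
  id: string,
  max = 4
): { next: string[]; added: boolean; removed: boolean } {
  if (prev.includes(id)) {
    return { next: prev.filter((v) => v !== id), added: false, removed: true };
  }
  if (prev.length >= max) {
    return { next: prev, added: false, removed: false }; // cap reached, ignore the click
  }
  return { next: [...prev, id], added: true, removed: false };
}

// Resolve display info from the cache, preserving the selection order.
function resolveChecked(ids: string[], cache: Record<string, QuickApp>): QuickApp[] {
  return ids.map((id) => cache[id]).filter((app): app is QuickApp => Boolean(app));
}
```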
diff --git a/projects/app/src/pageComponents/chat/ChatSetting/HomepageSetting.tsx b/projects/app/src/pageComponents/chat/ChatSetting/HomepageSetting/index.tsx
similarity index 80%
rename from projects/app/src/pageComponents/chat/ChatSetting/HomepageSetting.tsx
rename to projects/app/src/pageComponents/chat/ChatSetting/HomepageSetting/index.tsx
index c643d914e8cf..52fed72e612d 100644
--- a/projects/app/src/pageComponents/chat/ChatSetting/HomepageSetting.tsx
+++ b/projects/app/src/pageComponents/chat/ChatSetting/HomepageSetting/index.tsx
@@ -1,4 +1,4 @@
-import { Box, Button, Flex, Grid, Input } from '@chakra-ui/react';
+import { Box, Button, Flex, Grid, IconButton, Input, useDisclosure } from '@chakra-ui/react';
import { useTranslation } from 'react-i18next';
import MyInput from '@/components/MyInput';
import { useCallback, useState } from 'react';
@@ -9,9 +9,10 @@ import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import ImageUpload from '@/pageComponents/chat/ChatSetting/ImageUpload';
import type {
ChatSettingSchema,
- ChatSettingUpdateParams
+ ChatSettingUpdateParams,
+ QuickAppType,
+ SelectedToolType
} from '@fastgpt/global/core/chat/setting/type';
-import NextHead from '@/components/common/NextHead';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import ToolSelectModal from '@/pageComponents/chat/ChatSetting/ToolSelectModal';
import type { FlowNodeTemplateType } from '@fastgpt/global/core/workflow/type/node.d';
@@ -25,14 +26,21 @@ import {
DEFAULT_LOGO_BANNER_URL
} from '@/pageComponents/chat/constants';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
+import dynamic from 'next/dynamic';
+import type { ChatSettingReturnType } from '@fastgpt/global/core/chat/setting/type';
+
+const AddQuickAppModal = dynamic(
+ () => import('@/pageComponents/chat/ChatSetting/HomepageSetting/AddQuickAppModal')
+);
type Props = {
Header: React.FC<{ children?: React.ReactNode }>;
onDiagramShow: (show: boolean) => void;
};
-type FormValues = Omit & {
- selectedTools: ChatSettingSchema['selectedTools'];
+type FormValues = Omit & {
+ selectedTools: SelectedToolType[];
+ quickAppList: QuickAppType[];
};
const HomepageSetting = ({ Header, onDiagramShow }: Props) => {
@@ -44,14 +52,15 @@ const HomepageSetting = ({ Header, onDiagramShow }: Props) => {
const refreshChatSetting = useContextSelector(ChatSettingContext, (v) => v.refreshChatSetting);
const chatSettings2Form = useCallback(
- (data?: ChatSettingSchema) => {
+ (data?: ChatSettingReturnType) => {
return {
slogan: data?.slogan || t('chat:setting.home.slogan.default'),
dialogTips: data?.dialogTips || t('chat:setting.home.dialogue_tips.default'),
homeTabTitle: data?.homeTabTitle || 'FastGPT',
selectedTools: data?.selectedTools || [],
wideLogoUrl: data?.wideLogoUrl,
- squareLogoUrl: data?.squareLogoUrl
+ squareLogoUrl: data?.squareLogoUrl,
+ quickAppList: data?.quickAppList || []
};
},
[t]
@@ -63,6 +72,7 @@ const HomepageSetting = ({ Header, onDiagramShow }: Props) => {
const wideLogoUrl = watch('wideLogoUrl');
const squareLogoUrl = watch('squareLogoUrl');
+ const formQuickApps = watch('quickAppList');
useMount(async () => {
reset(chatSettings2Form(await refreshChatSetting()));
@@ -105,8 +115,10 @@ const HomepageSetting = ({ Header, onDiagramShow }: Props) => {
const { runAsync: onSubmit, loading: isSaving } = useRequest2(
async (values: FormValues) => {
+ const { quickAppList, ...params } = values;
return updateChatSetting({
- ...values,
+ ...params,
+ quickAppIds: quickAppList.map((q) => q._id),
selectedTools: values.selectedTools.map((tool) => ({
pluginId: tool.pluginId,
inputs: tool.inputs
@@ -121,19 +133,16 @@ const HomepageSetting = ({ Header, onDiagramShow }: Props) => {
}
);
+ const {
+ isOpen: isOpenAddQuickApp,
+ onOpen: onOpenAddQuickApp,
+ onClose: onCloseAddQuickApp
+ } = useDisclosure();
+
return (
-
+
{
>
+ {/* QUICK APPS */}
+
+
+ {t('chat:setting.home.quick_apps')}
+
+
+
+
+ {(formQuickApps || []).length > 0 ? (
+
+ {formQuickApps.map((q) => (
+
+
+ {q.name}
+
+ ))}
+
+ ) : (
+
+ {t('chat:setting.home.quick_apps.placeholder')}
+
+ )}
+
+
+ }
+ aria-label="add quick apps"
+ variant="ghost"
+ size="sm"
+ color="primary.700"
+ onClick={onOpenAddQuickApp}
+ />
+
+
+
{/* AVAILABLE TOOLS */}
{
+
+ {isOpenAddQuickApp && (
+ q._id)}
+ onClose={onCloseAddQuickApp}
+ onConfirm={(list) => setValue('quickAppList', list)}
+ />
+ )}
);
};
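Note: on save, `HomepageSetting` strips the hydrated `quickAppList` out of the form values and submits only `quickAppIds`, so the backend stores references rather than denormalized app documents. A small sketch of that mapping, with an illustrative payload type standing in for `ChatSettingUpdateParams`:

```ts
type QuickApp = { _id: string; name: string; avatar: string };

type HomepageFormValues = {
  slogan: string;
  dialogTips: string;
  homeTabTitle: string;
  quickAppList: QuickApp[]; // hydrated for display only
};

// Illustrative payload shape; the real one is ChatSettingUpdateParams in @fastgpt/global.
type UpdatePayload = Omit<HomepageFormValues, 'quickAppList'> & { quickAppIds: string[] };

// Drop the hydrated list and keep just the ids when building the update request.
function toUpdatePayload(values: HomepageFormValues): UpdatePayload {
  const { quickAppList, ...rest } = values;
  return { ...rest, quickAppIds: quickAppList.map((q) => q._id) };
}
```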
diff --git a/projects/app/src/pageComponents/chat/ChatSetting/LogDetails.tsx b/projects/app/src/pageComponents/chat/ChatSetting/LogDetails.tsx
index b17673ba4dc1..5a7cbe6987c0 100644
--- a/projects/app/src/pageComponents/chat/ChatSetting/LogDetails.tsx
+++ b/projects/app/src/pageComponents/chat/ChatSetting/LogDetails.tsx
@@ -28,18 +28,11 @@ const LogDetails = ({ Header }: Props) => {
} = useMultipleSelect(Object.values(ChatSourceEnum), true);
return (
-
+
void;
+ tab: ChatSettingTabOptionEnum;
+ onTabChange: (tab: ChatSettingTabOptionEnum) => void;
children?: React.ReactNode;
};
-const SettingTabs = ({ tab, children, onChange }: Props) => {
+const SettingTabs = ({ tab, children, onTabChange }: Props) => {
const { t } = useTranslation();
- const tabOptions: Parameters>[0]['list'] =
- useMemo(
- () => [
- { label: t('chat:setting.home.title'), value: ChatSettingTabOptionEnum.HOME },
- {
- label: t('chat:setting.data_dashboard.title'),
- value: ChatSettingTabOptionEnum.DATA_DASHBOARD
- },
- { label: t('chat:setting.log_details.title'), value: ChatSettingTabOptionEnum.LOG_DETAILS }
- ],
- [t]
- );
+ const tabOptions: Parameters>[0]['list'] = useMemo(
+ () => [
+ {
+ label: t('chat:setting.home.title'),
+ value: ChatSettingTabOptionEnum.HOME
+ },
+ {
+ label: t('chat:setting.data_dashboard.title'),
+ value: ChatSettingTabOptionEnum.DATA_DASHBOARD
+ },
+ {
+ label: t('chat:setting.log_details.title'),
+ value: ChatSettingTabOptionEnum.LOG_DETAILS
+ },
+ {
+ label: t('chat:setting.favourite.title'),
+ value: ChatSettingTabOptionEnum.FAVOURITE_APPS
+ }
+ ],
+ [t]
+ );
return (
-
-
+
+
{children}
diff --git a/projects/app/src/pageComponents/chat/ChatSetting/index.tsx b/projects/app/src/pageComponents/chat/ChatSetting/index.tsx
index 9eb33c1b8c26..ca8faaf27117 100644
--- a/projects/app/src/pageComponents/chat/ChatSetting/index.tsx
+++ b/projects/app/src/pageComponents/chat/ChatSetting/index.tsx
@@ -1,10 +1,10 @@
import DiagramModal from '@/pageComponents/chat/ChatSetting/DiagramModal';
-import { type PropsWithChildren, useCallback, useState } from 'react';
-import { ChatSettingTabOptionEnum } from '@/pageComponents/chat/constants';
+import { type PropsWithChildren, useCallback, useMemo, useState } from 'react';
+import { ChatSettingTabOptionEnum, ChatSidebarPaneEnum } from '@/pageComponents/chat/constants';
import dynamic from 'next/dynamic';
import SettingTabs from '@/pageComponents/chat/ChatSetting/SettingTabs';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
-import { Flex } from '@chakra-ui/react';
+import { Box, Flex, type FlexProps } from '@chakra-ui/react';
import { useContextSelector } from 'use-context-selector';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { ChatContext } from '@/web/core/chat/context/chatContext';
@@ -12,66 +12,107 @@ import NextHead from '@/components/common/NextHead';
import { ChatSettingContext } from '@/web/core/chat/context/chatSettingContext';
import ChatSliderMobileDrawer from '@/pageComponents/chat/slider/ChatSliderMobileDrawer';
import { useTranslation } from 'react-i18next';
+import { useMount } from 'ahooks';
+import { useSystemStore } from '@/web/common/system/useSystemStore';
+import { useRouter } from 'next/router';
const HomepageSetting = dynamic(() => import('@/pageComponents/chat/ChatSetting/HomepageSetting'));
const LogDetails = dynamic(() => import('@/pageComponents/chat/ChatSetting/LogDetails'));
const DataDashboard = dynamic(() => import('@/pageComponents/chat/ChatSetting/DataDashboard'));
+const FavouriteAppSetting = dynamic(
+ () => import('@/pageComponents/chat/ChatSetting/FavouriteAppSetting')
+);
const ChatSetting = () => {
- const { t } = useTranslation();
+ const router = useRouter();
const { isPc } = useSystem();
+ const { t } = useTranslation();
+ const { feConfigs } = useSystemStore();
+ const { tab: tabQuery } = router.query as { tab: ChatSettingTabOptionEnum };
const [isOpenDiagram, setIsOpenDiagram] = useState(false);
- const [tab, setTab] = useState<`${ChatSettingTabOptionEnum}`>('home');
-
+ const tab = useMemo(
+ () =>
+ Object.values(ChatSettingTabOptionEnum).includes(tabQuery)
+ ? tabQuery
+ : ChatSettingTabOptionEnum.HOME,
+ [tabQuery]
+ );
const onOpenSlider = useContextSelector(ChatContext, (v) => v.onOpenSlider);
const chatSettings = useContextSelector(ChatSettingContext, (v) => v.chatSettings);
+ const handlePaneChange = useContextSelector(ChatSettingContext, (v) => v.handlePaneChange);
+
+ const handleTabChange = useCallback(
+ (tab: ChatSettingTabOptionEnum) => {
+ handlePaneChange(ChatSidebarPaneEnum.SETTING, undefined, tab);
+ },
+ [handlePaneChange]
+ );
const SettingHeader = useCallback(
({ children }: PropsWithChildren) => (
-
+
{children}
),
- [tab, setTab]
+ [tab, handleTabChange]
);
+ useMount(() => {
+ if (!feConfigs?.isPlus) {
+ handlePaneChange(ChatSidebarPaneEnum.TEAM_APPS);
+ }
+ });
+
return (
<>
- {!isPc && (
- <>
-
-
+ {!isPc && (
+ <>
+
+
+
+
+
-
-
-
- >
- )}
-
- {/* homepage setting */}
- {tab === ChatSettingTabOptionEnum.HOME && (
-
- )}
-
- {/* data dashboard */}
- {tab === ChatSettingTabOptionEnum.DATA_DASHBOARD && }
-
- {/* log details */}
- {tab === ChatSettingTabOptionEnum.LOG_DETAILS && }
+ >
+ )}
+
+ {chatSettings && (
+
+ {/* homepage setting */}
+ {tab === ChatSettingTabOptionEnum.HOME && (
+
+ )}
+
+ {/* data dashboard */}
+ {tab === ChatSettingTabOptionEnum.DATA_DASHBOARD && (
+
+ )}
+
+ {/* log details */}
+ {tab === ChatSettingTabOptionEnum.LOG_DETAILS && }
+
+ {/* favourite apps */}
+ {tab === ChatSettingTabOptionEnum.FAVOURITE_APPS && (
+
+ )}
+
+ )}
+
</>
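Note: the active setting tab is no longer local `useState`; it is derived from `router.query.tab` and validated against `ChatSettingTabOptionEnum`, falling back to the home tab for unknown values, so the tab survives reloads and can be deep-linked. A sketch of the validation step (enum values here are illustrative):

```ts
enum ChatSettingTabOptionEnum {
  HOME = 'home',
  DATA_DASHBOARD = 'dataDashboard',
  LOG_DETAILS = 'logDetails',
  FAVOURITE_APPS = 'favouriteApps'
}

// Coerce an arbitrary query-string value to a known tab, defaulting to HOME.
function resolveTab(raw: string | string[] | undefined): ChatSettingTabOptionEnum {
  const value = Array.isArray(raw) ? raw[0] : raw;
  const all = Object.values(ChatSettingTabOptionEnum) as string[];
  return value && all.includes(value)
    ? (value as ChatSettingTabOptionEnum)
    : ChatSettingTabOptionEnum.HOME;
}
```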
diff --git a/projects/app/src/pageComponents/chat/ChatWindow/AppChatWindow.tsx b/projects/app/src/pageComponents/chat/ChatWindow/AppChatWindow.tsx
index a26477eb76db..0ec33549fb26 100644
--- a/projects/app/src/pageComponents/chat/ChatWindow/AppChatWindow.tsx
+++ b/projects/app/src/pageComponents/chat/ChatWindow/AppChatWindow.tsx
@@ -25,6 +25,10 @@ import { ChatSidebarPaneEnum } from '../constants';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import ChatHistorySidebar from '@/pageComponents/chat/slider/ChatSliderSidebar';
import ChatSliderMobileDrawer from '@/pageComponents/chat/slider/ChatSliderMobileDrawer';
+import dynamic from 'next/dynamic';
+import { getNanoid } from '@fastgpt/global/common/string/tools';
+
+const CustomPluginRunBox = dynamic(() => import('@/pageComponents/chat/CustomPluginRunBox'));
type Props = {
myApps: AppListItemType[];
@@ -33,7 +37,6 @@ type Props = {
const AppChatWindow = ({ myApps }: Props) => {
const { userInfo } = useUserStore();
const { chatId, appId, outLinkAuthData } = useChatStore();
- const { feConfigs } = useSystemStore();
const { t } = useTranslation();
const { isPc } = useSystem();
@@ -41,6 +44,8 @@ const AppChatWindow = ({ myApps }: Props) => {
const forbidLoadChat = useContextSelector(ChatContext, (v) => v.forbidLoadChat);
const onUpdateHistoryTitle = useContextSelector(ChatContext, (v) => v.onUpdateHistoryTitle);
+ const isPlugin = useContextSelector(ChatItemContext, (v) => v.isPlugin);
+ const onChangeChatId = useContextSelector(ChatContext, (v) => v.onChangeChatId);
const chatBoxData = useContextSelector(ChatItemContext, (v) => v.chatBoxData);
const datasetCiteData = useContextSelector(ChatItemContext, (v) => v.datasetCiteData);
const setChatBoxData = useContextSelector(ChatItemContext, (v) => v.setChatBoxData);
@@ -103,7 +108,7 @@ const AppChatWindow = ({ myApps }: Props) => {
onMessage: generatingMessage
});
- const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats(histories)[0]);
+ const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats({ messages: histories })[0]);
onUpdateHistoryTitle({ chatId, newTitle });
setChatBoxData((state) => ({
@@ -153,16 +158,26 @@ const AppChatWindow = ({ myApps }: Props) => {
/>
-
+ {isPlugin ? (
+ onChangeChatId(getNanoid())}
+ onStartChat={onStartChat}
+ />
+ ) : (
+
+ )}
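Note the signature change threaded through this and several later files: GPTMessages2Chats now takes an options object. A hedged sketch of the title-derivation step as it now reads (GPTMessages2Chats is imported exactly as before, so it is only declared here to keep the sketch self-contained):

import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';

// Declared rather than imported in this sketch; the real import is unchanged.
declare function GPTMessages2Chats(args: { messages: unknown[] }): any[];

// histories are the OpenAI-format messages sent with the current request.
const deriveTitle = (histories: unknown[]) =>
  getChatTitleFromChatMessage(GPTMessages2Chats({ messages: histories })[0]);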
diff --git a/projects/app/src/pageComponents/chat/ChatWindow/HomeChatWindow.tsx b/projects/app/src/pageComponents/chat/ChatWindow/HomeChatWindow.tsx
index 75bfd6d05f2a..bbbadc45ad1f 100644
--- a/projects/app/src/pageComponents/chat/ChatWindow/HomeChatWindow.tsx
+++ b/projects/app/src/pageComponents/chat/ChatWindow/HomeChatWindow.tsx
@@ -16,7 +16,7 @@ import { ChatContext } from '@/web/core/chat/context/chatContext';
import { useContextSelector } from 'use-context-selector';
import { ChatItemContext } from '@/web/core/chat/context/chatItemContext';
import { ChatTypeEnum } from '@/components/core/chat/ChatContainer/ChatBox/constants';
-import React, { useMemo, useEffect } from 'react';
+import React, { useMemo, useEffect, useRef, useState } from 'react';
import type { StartChatFnProps } from '@/components/core/chat/ChatContainer/type';
import { streamFetch } from '@/web/common/api/fetch';
import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
@@ -47,6 +47,7 @@ import { ChatRecordContext } from '@/web/core/chat/context/chatRecordContext';
import { ChatSidebarPaneEnum } from '../constants';
import ChatHistorySidebar from '@/pageComponents/chat/slider/ChatSliderSidebar';
import ChatSliderMobileDrawer from '@/pageComponents/chat/slider/ChatSliderMobileDrawer';
+import type { QuickAppType } from '@fastgpt/global/core/chat/setting/type';
type Props = {
myApps: AppListItemType[];
@@ -65,7 +66,8 @@ const defaultWhisperConfig: AppWhisperConfigType = {
};
const HomeChatWindow = ({ myApps }: Props) => {
- const { t } = useTranslation();
+ const { t, i18n } = useTranslation();
+ const language = i18n.language;
const { isPc } = useSystem();
const { userInfo } = useUserStore();
@@ -74,6 +76,7 @@ const HomeChatWindow = ({ myApps }: Props) => {
const forbidLoadChat = useContextSelector(ChatContext, (v) => v.forbidLoadChat);
const onUpdateHistoryTitle = useContextSelector(ChatContext, (v) => v.onUpdateHistoryTitle);
+ const onChangeGlobalAppId = useContextSelector(ChatContext, (v) => v.onChangeAppId);
const chatBoxData = useContextSelector(ChatItemContext, (v) => v.chatBoxData);
const datasetCiteData = useContextSelector(ChatItemContext, (v) => v.datasetCiteData);
@@ -83,10 +86,16 @@ const HomeChatWindow = ({ myApps }: Props) => {
const pane = useContextSelector(ChatSettingContext, (v) => v.pane);
const chatSettings = useContextSelector(ChatSettingContext, (v) => v.chatSettings);
const handlePaneChange = useContextSelector(ChatSettingContext, (v) => v.handlePaneChange);
+ const homeAppId = useContextSelector(ChatSettingContext, (v) => v.chatSettings?.appId || '');
const chatRecords = useContextSelector(ChatRecordContext, (v) => v.chatRecords);
const totalRecordsCount = useContextSelector(ChatRecordContext, (v) => v.totalRecordsCount);
+ const isQuickApp = useMemo(
+ () => chatSettings?.quickAppList.some((app) => app._id === appId),
+ [chatSettings?.quickAppList, appId]
+ );
+
const availableModels = useMemo(
() => llmModelList.map((model) => ({ value: model.model, label: model.name })),
[llmModelList]
@@ -126,23 +135,26 @@ const HomeChatWindow = ({ myApps }: Props) => {
const modelData = getWebLLMModel(selectedModel);
const res = await getInitChatInfo({ appId, chatId });
res.userAvatar = userInfo?.avatar;
- if (!res.app.chatConfig) {
- res.app.chatConfig = {
- fileSelectConfig: {
+
+ if (!isQuickApp) {
+ if (!res.app.chatConfig) {
+ res.app.chatConfig = {
+ fileSelectConfig: {
+ ...defaultFileSelectConfig,
+ canSelectImg: !!modelData.vision
+ },
+ whisperConfig: defaultWhisperConfig
+ };
+ } else {
+ res.app.chatConfig.fileSelectConfig = {
...defaultFileSelectConfig,
canSelectImg: !!modelData.vision
- },
- whisperConfig: defaultWhisperConfig
- };
- } else {
- res.app.chatConfig.fileSelectConfig = {
- ...defaultFileSelectConfig,
- canSelectImg: !!modelData.vision
- };
- res.app.chatConfig.whisperConfig = {
- ...defaultWhisperConfig,
- open: true
- };
+ };
+ res.app.chatConfig.whisperConfig = {
+ ...defaultWhisperConfig,
+ open: true
+ };
+ }
}
setChatBoxData(res);
@@ -169,13 +181,21 @@ const HomeChatWindow = ({ myApps }: Props) => {
}
);
+ const handleSwitchQuickApp = async (id: string) => {
+ if (isQuickApp && appId === id) {
+ onChangeGlobalAppId(homeAppId);
+ return;
+ }
+ onChangeGlobalAppId(id);
+ };
+
useMount(() => {
if (!feConfigs?.isPlus) {
handlePaneChange(ChatSidebarPaneEnum.TEAM_APPS);
}
});
- // 使用类似AppChatWindow的对话逻辑
+ // Reuse the conversation logic from AppChatWindow
const onStartChat = useMemoizedFn(
async ({
messages,
@@ -184,13 +204,38 @@ const HomeChatWindow = ({ myApps }: Props) => {
responseChatItemId,
generatingMessage
}: StartChatFnProps) => {
+ const histories = messages.slice(-1);
+
+ // Quick app: run its original workflow directly
+ if (isQuickApp && appId) {
+ const { responseText } = await streamFetch({
+ data: {
+ messages: histories,
+ variables,
+ responseChatItemId,
+ appId,
+ chatId
+ },
+ abortCtrl: controller,
+ onMessage: generatingMessage
+ });
+
+ const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats({ messages: histories })[0]);
+
+ onUpdateHistoryTitle({ chatId, newTitle });
+ setChatBoxData((state) => ({
+ ...state,
+ title: newTitle
+ }));
+
+ return { responseText, isNewChat: forbidLoadChat.current };
+ }
+
+ // Not a quick app: use the model and tools selected on the home page
if (!selectedModel) {
return Promise.reject('No model selected');
}
- const histories = messages.slice(-1);
-
- // 根据所选工具 ID 动态拉取节点,并填充默认输入
const tools: FlowNodeTemplateType[] = await Promise.all(
selectedToolIds.map(async (toolId) => {
const node = await getPreviewPluginNode({ appId: toolId });
@@ -223,7 +268,7 @@ const HomeChatWindow = ({ myApps }: Props) => {
abortCtrl: controller
});
- const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats(histories)[0]);
+ const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats({ messages: histories })[0]);
onUpdateHistoryTitle({ chatId, newTitle });
setChatBoxData((state) => ({
@@ -237,104 +282,104 @@ const HomeChatWindow = ({ myApps }: Props) => {
// Custom button group (model selection and tool selection)
const InputLeftComponent = useMemo(
- () => (
- <>
- {/* 模型选择 */}
- {availableModels.length > 0 && (
-
- {
- setChatBoxData((state) => ({
- ...state,
- app: {
- ...state.app,
- chatConfig: {
- ...state.app.chatConfig,
- fileSelectConfig: {
- ...defaultFileSelectConfig,
- canSelectImg: !!getWebLLMModel(model).vision
+ () =>
+ isQuickApp ? undefined : (
+ <>
+ {/* Model selection */}
+ {availableModels.length > 0 && (
+
+ {
+ setChatBoxData((state) => ({
+ ...state,
+ app: {
+ ...state.app,
+ chatConfig: {
+ ...state.app.chatConfig,
+ fileSelectConfig: {
+ ...defaultFileSelectConfig,
+ canSelectImg: !!getWebLLMModel(model).vision
+ }
}
}
- }
- }));
- setSelectedModel(model);
- }}
- />
-
- )}
-
- {/* 工具选择下拉框 */}
- {availableTools.length > 0 && (
-
- )}
- >
- ),
+ }));
+ setSelectedModel(model);
+ }}
+ />
+
+ )}
+
+ {/* Tool selection dropdown */}
+ {availableTools.length > 0 && (
+
+ )}
+ >
+ ),
[
availableModels,
selectedModel,
@@ -345,7 +390,8 @@ const HomeChatWindow = ({ myApps }: Props) => {
selectedToolIds,
setSelectedToolIds,
setChatBoxData,
- isPc
+ isPc,
+ isQuickApp
]
);
@@ -358,7 +404,7 @@ const HomeChatWindow = ({ myApps }: Props) => {
{isPc ? (
@@ -409,12 +455,15 @@ const HomeChatWindow = ({ myApps }: Props) => {
isReady={!loading}
feedbackType={'user'}
chatType={ChatTypeEnum.home}
+ slogan={chatSettings?.slogan}
outLinkAuthData={outLinkAuthData}
- onStartChat={onStartChat}
- InputLeftComponent={InputLeftComponent}
- dialogTips={chatSettings?.dialogTips}
wideLogo={chatSettings?.wideLogoUrl}
- slogan={chatSettings?.slogan}
+ dialogTips={chatSettings?.dialogTips}
+ InputLeftComponent={InputLeftComponent}
+ onStartChat={onStartChat}
+ quickAppList={chatSettings?.quickAppList || []}
+ currentQuickAppId={isQuickApp ? appId : undefined}
+ onSwitchQuickApp={handleSwitchQuickApp}
/>
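The quick-app handling added to HomeChatWindow is spread across several hunks; here is a condensed, hedged sketch of the switching rule, written as a pure function (the list item type is narrowed to the _id field actually used above):

// Given the clicked entry, the current appId, the configured home appId and
// the quick-app list, return the appId the chat window should switch to.
export const resolveQuickAppSwitch = ({
  clickedId,
  currentAppId,
  homeAppId,
  quickAppList
}: {
  clickedId: string;
  currentAppId: string;
  homeAppId: string;
  quickAppList: { _id: string }[];
}) => {
  const isQuickApp = quickAppList.some((app) => app._id === currentAppId);
  // Re-clicking the active quick app toggles back to the home app.
  if (isQuickApp && currentAppId === clickedId) return homeAppId;
  return clickedId;
};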
diff --git a/projects/app/src/pageComponents/chat/constants.ts b/projects/app/src/pageComponents/chat/constants.ts
index 7a01ca0ce1f0..20835ac70ac1 100644
--- a/projects/app/src/pageComponents/chat/constants.ts
+++ b/projects/app/src/pageComponents/chat/constants.ts
@@ -16,10 +16,10 @@ export type CollapseStatusType = 0 | 1;
export const defaultCollapseStatus: CollapseStatusType = 0; // default expanded
export enum ChatSettingTabOptionEnum {
- HOME = 'home',
- FAVORITE_APPS = 'favorite_apps',
- DATA_DASHBOARD = 'data_dashboard',
- LOG_DETAILS = 'log_details'
+ HOME = 'h',
+ DATA_DASHBOARD = 'd',
+ LOG_DETAILS = 'l',
+ FAVOURITE_APPS = 'f'
}
export const DEFAULT_LOGO_BANNER_URL = '/imgs/chat/fastgpt_banner.svg';
diff --git a/projects/app/src/pageComponents/chat/slider/ChatSliderHeader.tsx b/projects/app/src/pageComponents/chat/slider/ChatSliderHeader.tsx
index e71fb50d4d31..c468d84c266f 100644
--- a/projects/app/src/pageComponents/chat/slider/ChatSliderHeader.tsx
+++ b/projects/app/src/pageComponents/chat/slider/ChatSliderHeader.tsx
@@ -34,6 +34,7 @@ const ChatSliderHeader = ({ title, banner }: Props) => {
const isHomePane = pane === ChatSidebarPaneEnum.HOME;
const isTeamAppsPane = pane === ChatSidebarPaneEnum.TEAM_APPS;
+ const isFavouriteAppPane = pane === ChatSidebarPaneEnum.FAVORITE_APPS;
return isPc ? (
@@ -88,6 +89,34 @@ const ChatSliderHeader = ({ title, banner }: Props) => {
+ {
+ handlePaneChange(ChatSidebarPaneEnum.FAVORITE_APPS);
+ onCloseSlider();
+ setChatId();
+ }}
+ >
+
+
+
+ {t('chat:sidebar.favourite_apps')}
+
+
+
+
{
handlePaneChange(ChatSidebarPaneEnum.TEAM_APPS);
diff --git a/projects/app/src/pageComponents/chat/SliderApps.tsx b/projects/app/src/pageComponents/chat/slider/index.tsx
similarity index 83%
rename from projects/app/src/pageComponents/chat/SliderApps.tsx
rename to projects/app/src/pageComponents/chat/slider/index.tsx
index 592a2cc0c20f..596d2cd00223 100644
--- a/projects/app/src/pageComponents/chat/SliderApps.tsx
+++ b/projects/app/src/pageComponents/chat/slider/index.tsx
@@ -2,7 +2,6 @@ import React, { useCallback } from 'react';
import type { BoxProps } from '@chakra-ui/react';
import { Flex, Box, HStack, Image } from '@chakra-ui/react';
import { motion, AnimatePresence } from 'framer-motion';
-import { useRouter } from 'next/router';
import { useTranslation } from 'next-i18next';
import Avatar from '@fastgpt/web/components/common/Avatar';
import { type AppListItemType } from '@fastgpt/global/core/app/type';
@@ -11,12 +10,6 @@ import { useUserStore } from '@/web/support/user/useUserStore';
import UserAvatarPopover from '@/pageComponents/chat/UserAvatarPopover';
import MyBox from '@fastgpt/web/components/common/MyBox';
import MyIcon from '@fastgpt/web/components/common/Icon';
-import type {
- GetResourceFolderListProps,
- GetResourceListItemResponse
-} from '@fastgpt/global/common/parentFolder/type';
-import { getMyApps } from '@/web/core/app/api';
-import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import {
ChatSidebarPaneEnum,
DEFAULT_LOGO_BANNER_COLLAPSED_URL,
@@ -230,12 +223,18 @@ const ActionButton: React.FC<{
borderRadius={'8px'}
alignItems={'center'}
justifyContent={isCollapsed ? 'center' : 'flex-start'}
- bg={isActive ? 'primary.100' : 'transparent'}
- color={isActive ? 'primary.600' : 'myGray.500'}
- _hover={{
- bg: isCollapsed ? 'myGray.200' : 'primary.100',
- color: 'primary.600'
- }}
+ {...(isActive
+ ? {
+ bg: 'primary.100',
+ color: 'primary.600'
+ }
+ : {
+ bg: 'transparent',
+ color: 'myGray.500',
+ _hover: {
+ bg: isCollapsed ? 'myGray.200' : 'primary.100'
+ }
+ })}
onClick={onClick}
>
@@ -255,7 +254,6 @@ const ActionButton: React.FC<{
const NavigationSection = () => {
const { t } = useTranslation();
const { feConfigs } = useSystemStore();
- const isProVersion = !!feConfigs.isPlus;
const isCollapsed = useContextSelector(ChatSettingContext, (v) => v.collapse === 1);
const onTriggerCollapse = useContextSelector(ChatSettingContext, (v) => v.onTriggerCollapse);
@@ -267,7 +265,11 @@ const NavigationSection = () => {
ChatSettingContext,
(v) => v.pane === ChatSidebarPaneEnum.TEAM_APPS
);
- const onHomeClick = useContextSelector(ChatSettingContext, (v) => v.handlePaneChange);
+ const isFavouriteAppsActive = useContextSelector(
+ ChatSettingContext,
+ (v) => v.pane === ChatSidebarPaneEnum.FAVORITE_APPS
+ );
+ const handlePaneChange = useContextSelector(ChatSettingContext, (v) => v.handlePaneChange);
return (
@@ -275,50 +277,67 @@ const NavigationSection = () => {
- {isProVersion && (
-
- {isCollapsed ? (
-
- onHomeClick(ChatSidebarPaneEnum.HOME)}
- />
-
- ) : (
-
- onHomeClick(ChatSidebarPaneEnum.HOME)}
- />
-
- )}
-
- )}
-
{isCollapsed ? (
- onHomeClick(ChatSidebarPaneEnum.TEAM_APPS)}
- />
+
+ {feConfigs.isPlus && (
+ <>
+ handlePaneChange(ChatSidebarPaneEnum.HOME)}
+ />
+
+ handlePaneChange(ChatSidebarPaneEnum.FAVORITE_APPS)}
+ />
+ >
+ )}
+
+ handlePaneChange(ChatSidebarPaneEnum.TEAM_APPS)}
+ />
+
) : (
- onHomeClick(ChatSidebarPaneEnum.TEAM_APPS)}
- />
+
+ {feConfigs.isPlus && (
+ <>
+ handlePaneChange(ChatSidebarPaneEnum.HOME)}
+ />
+
+ handlePaneChange(ChatSidebarPaneEnum.FAVORITE_APPS)}
+ />
+ >
+ )}
+
+ handlePaneChange(ChatSidebarPaneEnum.TEAM_APPS)}
+ />
+
)}
@@ -459,7 +478,7 @@ const BottomSection = () => {
);
};
-const SliderApps = ({ apps, activeAppId }: Props) => {
+const ChatSlider = ({ apps, activeAppId }: Props) => {
const { t } = useTranslation();
const isCollapsed = useContextSelector(ChatSettingContext, (v) => v.collapse === 1);
@@ -467,9 +486,6 @@ const SliderApps = ({ apps, activeAppId }: Props) => {
const handlePaneChange = useContextSelector(ChatSettingContext, (v) => v.handlePaneChange);
- const isRecentlyUsedAppSelected = (id: string) =>
- pane === ChatSidebarPaneEnum.RECENTLY_USED_APPS && id === activeAppId;
-
return (
{
}}
animate={isCollapsed ? 'folded' : 'expanded'}
initial={false}
+ userSelect={'none'}
>
@@ -517,10 +534,10 @@ const SliderApps = ({ apps, activeAppId }: Props) => {
borderRadius={'md'}
alignItems={'center'}
fontSize={'sm'}
- {...(isRecentlyUsedAppSelected(item._id)
+ {...(pane === ChatSidebarPaneEnum.RECENTLY_USED_APPS && item._id === activeAppId
? { bg: 'primary.100', color: 'primary.600' }
: {
- _hover: { bg: 'primary.100', color: 'primary.600' },
+ _hover: { bg: 'primary.100' },
onClick: () =>
handlePaneChange(ChatSidebarPaneEnum.RECENTLY_USED_APPS, item._id)
})}
@@ -539,4 +556,4 @@ const SliderApps = ({ apps, activeAppId }: Props) => {
);
};
-export default React.memo(SliderApps);
+export default React.memo(ChatSlider);
diff --git a/projects/app/src/pageComponents/dataset/detail/CollectionCard/TrainingStates.tsx b/projects/app/src/pageComponents/dataset/detail/CollectionCard/TrainingStates.tsx
index 1dde7fe0eaa1..97dbe638aff5 100644
--- a/projects/app/src/pageComponents/dataset/detail/CollectionCard/TrainingStates.tsx
+++ b/projects/app/src/pageComponents/dataset/detail/CollectionCard/TrainingStates.tsx
@@ -541,7 +541,7 @@ const TrainingStates = ({
}
);
- const errorCounts = (Object.values(trainingDetail?.errorCounts || {}) as number[]).reduce(
+ const errorCounts = Object.values(trainingDetail?.errorCounts || {}).reduce(
(acc, count) => acc + count,
0
);
diff --git a/projects/app/src/pages/api/core/ai/model/test.ts b/projects/app/src/pages/api/core/ai/model/test.ts
index 974319619f7f..c6efb37fd5f0 100644
--- a/projects/app/src/pages/api/core/ai/model/test.ts
+++ b/projects/app/src/pages/api/core/ai/model/test.ts
@@ -9,14 +9,14 @@ import {
type STTModelType,
type TTSModelType
} from '@fastgpt/global/core/ai/model.d';
-import { createChatCompletion, getAIApi } from '@fastgpt/service/core/ai/config';
+import { getAIApi } from '@fastgpt/service/core/ai/config';
import { addLog } from '@fastgpt/service/common/system/log';
import { getVectorsByText } from '@fastgpt/service/core/ai/embedding';
import { reRankRecall } from '@fastgpt/service/core/ai/rerank';
import { aiTranscriptions } from '@fastgpt/service/core/ai/audio/transcriptions';
import { isProduction } from '@fastgpt/global/common/system/constants';
import * as fs from 'fs';
-import { llmCompletionsBodyFormat, formatLLMResponse } from '@fastgpt/service/core/ai/utils';
+import { createLLMResponse } from '@fastgpt/service/core/ai/llm/request';
export type testQuery = { model: string; channelId?: number };
@@ -69,29 +69,17 @@ async function handler(
export default NextAPI(handler);
const testLLMModel = async (model: LLMModelItemType, headers: Record) => {
- const requestBody = llmCompletionsBodyFormat(
- {
+ const { answerText } = await createLLMResponse({
+ body: {
model: model.model,
messages: [{ role: 'user', content: 'hi' }],
stream: true
},
- model
- );
-
- const { response } = await createChatCompletion({
- modelData: model,
- body: requestBody,
- options: {
- headers: {
- Accept: 'application/json, text/plain, */*',
- ...headers
- }
- }
+ custonHeaders: headers
});
- const { text: answer } = await formatLLMResponse(response);
- if (answer) {
- return answer;
+ if (answerText) {
+ return answerText;
}
return Promise.reject('Model response empty');
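The rewritten test path collapses the old body-format and stream-parse steps into a single createLLMResponse call. A minimal sketch using only the fields visible in this diff (note that custonHeaders is spelled exactly as the source expects):

import { createLLMResponse } from '@fastgpt/service/core/ai/llm/request';

// Send a minimal "hi" prompt to the model and fail loudly on an empty answer.
export const pingLLM = async (model: string, headers: Record<string, string>) => {
  const { answerText } = await createLLMResponse({
    body: {
      model,
      messages: [{ role: 'user', content: 'hi' }],
      stream: true
    },
    custonHeaders: headers // extra request headers, passed through as-is
  });

  if (!answerText) {
    return Promise.reject('Model response empty');
  }
  return answerText;
};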
diff --git a/projects/app/src/pages/api/core/ai/optimizePrompt.ts b/projects/app/src/pages/api/core/ai/optimizePrompt.ts
index b96f14e88b1b..5326c8bdec37 100644
--- a/projects/app/src/pages/api/core/ai/optimizePrompt.ts
+++ b/projects/app/src/pages/api/core/ai/optimizePrompt.ts
@@ -3,18 +3,15 @@ import { NextAPI } from '@/service/middleware/entry';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { responseWrite } from '@fastgpt/service/common/response';
import { sseErrRes } from '@fastgpt/service/common/response';
-import { createChatCompletion } from '@fastgpt/service/core/ai/config';
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
-import { loadRequestMessages } from '@fastgpt/service/core/chat/utils';
-import { llmCompletionsBodyFormat, parseLLMStreamResponse } from '@fastgpt/service/core/ai/utils';
-import { countGptMessagesTokens } from '@fastgpt/service/common/string/tiktoken/index';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
import { createUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { i18nT } from '@fastgpt/web/i18n/utils';
import { addLog } from '@fastgpt/service/common/system/log';
+import { createLLMResponse } from '@fastgpt/service/core/ai/llm/request';
type OptimizePromptBody = {
originalPrompt: string;
@@ -100,67 +97,17 @@ async function handler(req: ApiRequestProps, res: ApiRespons
}
];
- const requestMessages = await loadRequestMessages({
- messages,
- useVision: false
- });
-
- const { response, isStreamResponse } = await createChatCompletion({
- body: llmCompletionsBodyFormat(
- {
- model,
- messages: requestMessages,
- temperature: 0.1,
- max_tokens: 2000,
- stream: true
- },
- model
- )
- });
-
- const { inputTokens, outputTokens } = await (async () => {
- if (isStreamResponse) {
- const { parsePart, getResponseData } = parseLLMStreamResponse();
-
- let optimizedText = '';
-
- for await (const part of response) {
- const { responseContent } = parsePart({
- part,
- parseThinkTag: true,
- retainDatasetCite: false
- });
-
- if (responseContent) {
- optimizedText += responseContent;
- responseWrite({
- res,
- event: SseResponseEventEnum.answer,
- data: JSON.stringify({
- choices: [
- {
- delta: {
- content: responseContent
- }
- }
- ]
- })
- });
- }
- }
-
- const { content: answer, usage } = getResponseData();
- return {
- content: answer,
- inputTokens: usage?.prompt_tokens || (await countGptMessagesTokens(requestMessages)),
- outputTokens:
- usage?.completion_tokens ||
- (await countGptMessagesTokens([{ role: 'assistant', content: optimizedText }]))
- };
- } else {
- const usage = response.usage;
- const content = response.choices?.[0]?.message?.content || '';
-
+ const {
+ usage: { inputTokens, outputTokens }
+ } = await createLLMResponse({
+ body: {
+ model,
+ messages,
+ temperature: 0.1,
+ max_tokens: 2000,
+ stream: true
+ },
+ onStreaming: ({ text }) => {
responseWrite({
res,
event: SseResponseEventEnum.answer,
@@ -168,22 +115,15 @@ async function handler(req: ApiRequestProps, res: ApiRespons
choices: [
{
delta: {
- content
+ content: text
}
}
]
})
});
-
- return {
- content,
- inputTokens: usage?.prompt_tokens || (await countGptMessagesTokens(requestMessages)),
- outputTokens:
- usage?.completion_tokens ||
- (await countGptMessagesTokens([{ role: 'assistant', content: content }]))
- };
}
- })();
+ });
+
responseWrite({
res,
event: SseResponseEventEnum.answer,
@@ -193,8 +133,7 @@ async function handler(req: ApiRequestProps, res: ApiRespons
const { totalPoints, modelName } = formatModelChars2Points({
model,
inputTokens,
- outputTokens,
- modelType: ModelTypeEnum.llm
+ outputTokens
});
createUsage({
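optimizePrompt now delegates both streaming and token accounting to createLLMResponse, so the manual parseLLMStreamResponse and token-counting plumbing disappears. A hedged sketch of the remaining flow, with handler types simplified:

import { responseWrite } from '@fastgpt/service/common/response';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { createLLMResponse } from '@fastgpt/service/core/ai/llm/request';
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';

// Streams each delta to the client as an SSE "answer" event and returns the
// token usage reported by the LLM layer.
async function streamOptimizedPrompt(
  res: any, // Next.js API response, typed loosely for this sketch
  model: string,
  messages: ChatCompletionMessageParam[]
) {
  const {
    usage: { inputTokens, outputTokens }
  } = await createLLMResponse({
    body: { model, messages, temperature: 0.1, max_tokens: 2000, stream: true },
    onStreaming: ({ text }) => {
      responseWrite({
        res,
        event: SseResponseEventEnum.answer,
        data: JSON.stringify({ choices: [{ delta: { content: text } }] })
      });
    }
  });

  return { inputTokens, outputTokens };
}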
diff --git a/projects/app/src/pages/api/core/app/list.ts b/projects/app/src/pages/api/core/app/list.ts
index 27811a196f45..b65d8b0cdbf6 100644
--- a/projects/app/src/pages/api/core/app/list.ts
+++ b/projects/app/src/pages/api/core/app/list.ts
@@ -217,6 +217,7 @@ async function handler(req: ApiRequestProps): Promise, res: NextApiRe
);
// update app
- await MongoApp.findByIdAndUpdate(
- appId,
+ await MongoApp.updateOne(
+ { _id: appId },
{
modules: nodes,
edges,
diff --git a/projects/app/src/pages/api/core/chat/chatTest.ts b/projects/app/src/pages/api/core/chat/chatTest.ts
index e43e8ca103eb..761f361bcfcd 100644
--- a/projects/app/src/pages/api/core/chat/chatTest.ts
+++ b/projects/app/src/pages/api/core/chat/chatTest.ts
@@ -83,7 +83,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
if (!Array.isArray(edges)) {
throw new Error('Edges is not array');
}
- const chatMessages = GPTMessages2Chats(messages);
+ const chatMessages = GPTMessages2Chats({ messages });
// console.log(JSON.stringify(chatMessages, null, 2), '====', chatMessages.length);
/* user auth */
diff --git a/projects/app/src/pages/api/core/chat/feedback/updateUserFeedback.ts b/projects/app/src/pages/api/core/chat/feedback/updateUserFeedback.ts
index d8d6120f1e59..30ba3bc7c399 100644
--- a/projects/app/src/pages/api/core/chat/feedback/updateUserFeedback.ts
+++ b/projects/app/src/pages/api/core/chat/feedback/updateUserFeedback.ts
@@ -60,7 +60,7 @@ async function handler(req: ApiRequestProps, res: NextA
return 0;
})();
- await MongoAppChatLog.updateOne(
+ await MongoAppChatLog.findOneAndUpdate(
{
teamId,
appId,
diff --git a/projects/app/src/pages/api/core/dataset/collection/trainingDetail.ts b/projects/app/src/pages/api/core/dataset/collection/trainingDetail.ts
index d656507c13cc..76b9abfd27d0 100644
--- a/projects/app/src/pages/api/core/dataset/collection/trainingDetail.ts
+++ b/projects/app/src/pages/api/core/dataset/collection/trainingDetail.ts
@@ -112,7 +112,7 @@ async function handler(
{
$match: {
...match,
- retryCount: { $lte: 0 },
+ // retryCount: { $lte: 0 },
errorMsg: { $exists: true }
}
},
diff --git a/projects/app/src/pages/api/system/pluginImgs/[...path].ts b/projects/app/src/pages/api/system/plugin/[...path].ts
similarity index 95%
rename from projects/app/src/pages/api/system/pluginImgs/[...path].ts
rename to projects/app/src/pages/api/system/plugin/[...path].ts
index 983868ce1ee4..d0744b87a8d6 100644
--- a/projects/app/src/pages/api/system/pluginImgs/[...path].ts
+++ b/projects/app/src/pages/api/system/plugin/[...path].ts
@@ -6,7 +6,7 @@ import { FastGPTPluginUrl } from '@fastgpt/service/common/system/constants';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { path = [] } = req.query as any;
- const requestPath = `/imgs/tools/${path?.join('/')}`;
+ const requestPath = `/imgs/${path?.join('/')}`;
if (!requestPath) {
throw new Error('url is empty');
diff --git a/projects/app/src/pages/api/v1/chat/completions.ts b/projects/app/src/pages/api/v1/chat/completions.ts
index 8e8415c3086f..cd210e8a5707 100644
--- a/projects/app/src/pages/api/v1/chat/completions.ts
+++ b/projects/app/src/pages/api/v1/chat/completions.ts
@@ -132,7 +132,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
API params: chatId + [Human]
API params: [histories, Human]
*/
- const chatMessages = GPTMessages2Chats(messages);
+ const chatMessages = GPTMessages2Chats({ messages });
// Computed start hook params
const startHookText = (() => {
diff --git a/projects/app/src/pages/api/v2/chat/completions.ts b/projects/app/src/pages/api/v2/chat/completions.ts
index d5da272ecdb2..13e309066b12 100644
--- a/projects/app/src/pages/api/v2/chat/completions.ts
+++ b/projects/app/src/pages/api/v2/chat/completions.ts
@@ -132,7 +132,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
API params: chatId + [Human]
API params: [histories, Human]
*/
- const chatMessages = GPTMessages2Chats(messages);
+ const chatMessages = GPTMessages2Chats({ messages });
// Computed start hook params
const startHookText = (() => {
diff --git a/projects/app/src/pages/chat/index.tsx b/projects/app/src/pages/chat/index.tsx
index 9a676b484495..b3a70f98465b 100644
--- a/projects/app/src/pages/chat/index.tsx
+++ b/projects/app/src/pages/chat/index.tsx
@@ -3,7 +3,7 @@ import NextHead from '@/components/common/NextHead';
import { Box, Flex } from '@chakra-ui/react';
import { useChatStore } from '@/web/core/chat/context/useChatStore';
import PageContainer from '@/components/PageContainer';
-import SliderApps from '@/pageComponents/chat/SliderApps';
+import ChatSlider from '@/pageComponents/chat/slider';
import { serviceSideProps } from '@/web/common/i18n/utils';
import { ChatSidebarPaneEnum } from '@/pageComponents/chat/constants';
import { GetChatTypeEnum } from '@/global/core/chat/constants';
@@ -26,6 +26,7 @@ import {
ChatSettingContextProvider
} from '@/web/core/chat/context/chatSettingContext';
import ChatTeamApp from '@/pageComponents/chat/ChatTeamApp';
+import ChatFavouriteApp from '@/pageComponents/chat/ChatFavouriteApp';
const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
const { isPc } = useSystem();
@@ -49,7 +50,7 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
overflow={'hidden'}
transition={'width 0.1s ease-in-out'}
>
-
+
)}
@@ -58,12 +59,15 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
{/* home chat window */}
{pane === ChatSidebarPaneEnum.HOME && }
- {/* recently used apps chat window */}
- {pane === ChatSidebarPaneEnum.RECENTLY_USED_APPS && }
+ {/* favourite apps */}
+ {pane === ChatSidebarPaneEnum.FAVORITE_APPS && }
{/* team apps */}
{pane === ChatSidebarPaneEnum.TEAM_APPS && }
+ {/* recently used apps chat window */}
+ {pane === ChatSidebarPaneEnum.RECENTLY_USED_APPS && }
+
{/* setting */}
{pane === ChatSidebarPaneEnum.SETTING && }
diff --git a/projects/app/src/pages/chat/share.tsx b/projects/app/src/pages/chat/share.tsx
index b588d6f319d8..df6dc63d210c 100644
--- a/projects/app/src/pages/chat/share.tsx
+++ b/projects/app/src/pages/chat/share.tsx
@@ -173,7 +173,7 @@ const OutLink = (props: Props) => {
abortCtrl: controller
});
- const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats(histories)[0]);
+ const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats({ messages: histories })[0]);
// new chat
if (completionChatId !== chatId) {
diff --git a/projects/app/src/service/core/dataset/queues/generateQA.ts b/projects/app/src/service/core/dataset/queues/generateQA.ts
index 8238c31e03a7..26468efca564 100644
--- a/projects/app/src/service/core/dataset/queues/generateQA.ts
+++ b/projects/app/src/service/core/dataset/queues/generateQA.ts
@@ -1,7 +1,6 @@
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { pushLLMTrainingUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
-import { createChatCompletion } from '@fastgpt/service/core/ai/config';
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
import { addLog } from '@fastgpt/service/common/system/log';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
@@ -10,12 +9,6 @@ import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api
import { getLLMModel } from '@fastgpt/service/core/ai/model';
import { checkTeamAiPointsAndLock } from './utils';
import { addMinutes } from 'date-fns';
-import {
- countGptMessagesTokens,
- countPromptTokens
-} from '@fastgpt/service/common/string/tiktoken/index';
-import { loadRequestMessages } from '@fastgpt/service/core/chat/utils';
-import { llmCompletionsBodyFormat, formatLLMResponse } from '@fastgpt/service/core/ai/utils';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import {
chunkAutoChunkSize,
@@ -25,6 +18,7 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
import { text2Chunks } from '@fastgpt/service/worker/function';
import { pushDataListToTrainingQueue } from '@fastgpt/service/core/dataset/training/controller';
import { delay } from '@fastgpt/service/common/bullmq';
+import { createLLMResponse } from '@fastgpt/service/core/ai/llm/request';
const reduceQueue = () => {
global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
@@ -130,20 +124,17 @@ export async function generateQA(): Promise {
}
];
- const { response: chatResponse } = await createChatCompletion({
- body: llmCompletionsBodyFormat(
- {
- model: modelData.model,
- temperature: 0.3,
- messages: await loadRequestMessages({ messages, useVision: false }),
- stream: true
- },
- modelData
- )
+ const {
+ answerText: answer,
+ usage: { inputTokens, outputTokens }
+ } = await createLLMResponse({
+ body: {
+ model: modelData.model,
+ temperature: 0.3,
+ messages,
+ stream: true
+ }
});
- const { text: answer, usage } = await formatLLMResponse(chatResponse);
- const inputTokens = usage?.prompt_tokens || (await countGptMessagesTokens(messages));
- const outputTokens = usage?.completion_tokens || (await countPromptTokens(answer));
const qaArr = await formatSplitText({ answer, rawText: text, llmModel: modelData }); // formatted QA pairs
@@ -181,7 +172,7 @@ export async function generateQA(): Promise {
addLog.info(`[QA Queue] Finish`, {
time: Date.now() - startTime,
splitLength: qaArr.length,
- usage
+ usage: { inputTokens, outputTokens }
});
} catch (err: any) {
addLog.error(`[QA Queue] Error`, err);
diff --git a/projects/app/src/service/support/mcp/utils.ts b/projects/app/src/service/support/mcp/utils.ts
index ec9355693d63..7db003bb8c9e 100644
--- a/projects/app/src/service/support/mcp/utils.ts
+++ b/projects/app/src/service/support/mcp/utils.ts
@@ -38,7 +38,7 @@ import { saveChat } from '@fastgpt/service/core/chat/saveChat';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { createChatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
-import { removeDatasetCiteText } from '@fastgpt/service/core/ai/utils';
+import { removeDatasetCiteText } from '@fastgpt/global/core/ai/llm/utils';
export const pluginNodes2InputSchema = (
nodes: { flowNodeType: FlowNodeTypeEnum; inputs: FlowNodeInputItemType[] }[]
diff --git a/projects/app/src/service/support/wallet/usage/push.ts b/projects/app/src/service/support/wallet/usage/push.ts
index d0b627443788..bc5918f1739d 100644
--- a/projects/app/src/service/support/wallet/usage/push.ts
+++ b/projects/app/src/service/support/wallet/usage/push.ts
@@ -35,7 +35,6 @@ export const pushGenerateVectorUsage = ({
deepSearchOutputTokens?: number;
}) => {
const { totalPoints: totalVector, modelName: vectorModelName } = formatModelChars2Points({
- modelType: ModelTypeEnum.embedding,
model,
inputTokens
});
@@ -47,7 +46,6 @@ export const pushGenerateVectorUsage = ({
extensionModelName: ''
};
const { totalPoints, modelName } = formatModelChars2Points({
- modelType: ModelTypeEnum.llm,
model: extensionModel,
inputTokens: extensionInputTokens,
outputTokens: extensionOutputTokens
@@ -64,7 +62,6 @@ export const pushGenerateVectorUsage = ({
deepSearchModelName: ''
};
const { totalPoints, modelName } = formatModelChars2Points({
- modelType: ModelTypeEnum.llm,
model: deepSearchModel,
inputTokens: deepSearchInputTokens,
outputTokens: deepSearchOutputTokens
@@ -145,8 +142,7 @@ export const pushQuestionGuideUsage = ({
const { totalPoints, modelName } = formatModelChars2Points({
inputTokens,
outputTokens,
- model,
- modelType: ModelTypeEnum.llm
+ model
});
createUsage({
@@ -184,8 +180,7 @@ export const pushAudioSpeechUsage = ({
}) => {
const { totalPoints, modelName } = formatModelChars2Points({
model,
- inputTokens: charsLength,
- modelType: ModelTypeEnum.tts
+ inputTokens: charsLength
});
createUsage({
@@ -221,7 +216,6 @@ export const pushWhisperUsage = ({
const { totalPoints, modelName } = formatModelChars2Points({
model: whisperModel.model,
inputTokens: duration,
- modelType: ModelTypeEnum.stt,
multiple: 60
});
@@ -259,8 +253,7 @@ export const pushRerankUsage = ({
}) => {
const { totalPoints, modelName } = formatModelChars2Points({
model,
- inputTokens,
- modelType: ModelTypeEnum.rerank
+ inputTokens
});
createUsage({
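Every call site here drops the explicit modelType: formatModelChars2Points now infers the model class from the model itself. A hedged sketch of the new call shape (the model id and token counts are made-up examples):

import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';

// Before: { model, inputTokens: duration, modelType: ModelTypeEnum.stt, multiple: 60 }
// After: the model type is inferred, so only the model and token counts remain.
const { totalPoints, modelName } = formatModelChars2Points({
  model: 'whisper-1', // illustrative model id
  inputTokens: 60, // e.g. seconds of audio for STT billing
  multiple: 60
});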
diff --git a/projects/app/src/web/common/utils/voice.ts b/projects/app/src/web/common/utils/voice.ts
index bbc0cce2f5ce..2791bc761d26 100644
--- a/projects/app/src/web/common/utils/voice.ts
+++ b/projects/app/src/web/common/utils/voice.ts
@@ -111,9 +111,9 @@ export const useAudioPlay = (
/* Perform a voice playback */
const playAudioByText = useCallback(
async ({ text, buffer }: { text: string; buffer?: Uint8Array }) => {
- const playAudioBuffer = (buffer: Uint8Array) => {
+ const playAudioBuffer = (audioBuffer: Uint8Array) => {
if (!audioRef.current) return;
- const audioUrl = URL.createObjectURL(new Blob([buffer], { type: contentType }));
+ const audioUrl = URL.createObjectURL(new Blob([audioBuffer], { type: contentType }));
audioRef.current.src = audioUrl;
audioRef.current.play();
};
diff --git a/projects/app/src/web/core/app/api.ts b/projects/app/src/web/core/app/api.ts
index 7d2a795e31bd..4ada63bff1f7 100644
--- a/projects/app/src/web/core/app/api.ts
+++ b/projects/app/src/web/core/app/api.ts
@@ -1,5 +1,5 @@
import { GET, POST, DELETE, PUT } from '@/web/common/api/request';
-import type { AppDetailType, AppListItemType } from '@fastgpt/global/core/app/type.d';
+import type { AppDetailType, AppListItemType, AppSchema } from '@fastgpt/global/core/app/type.d';
import type { AppUpdateParams, AppChangeOwnerBody } from '@/global/core/app/api';
import type { CreateAppBody } from '@/pages/api/core/app/create';
import type { ListAppBody } from '@/pages/api/core/app/list';
diff --git a/projects/app/src/web/core/chat/api.ts b/projects/app/src/web/core/chat/api.ts
index 76fe33af4299..8ea36e92fc15 100644
--- a/projects/app/src/web/core/chat/api.ts
+++ b/projects/app/src/web/core/chat/api.ts
@@ -1,8 +1,12 @@
import { GET, POST, DELETE, PUT } from '@/web/common/api/request';
import type { ChatHistoryItemType, ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import type {
+ ChatFavouriteTagType,
ChatSettingSchema,
- ChatSettingUpdateParams
+ ChatSettingUpdateParams,
+ QuickAppType,
+ SelectedToolType,
+ ChatSettingReturnType
} from '@fastgpt/global/core/chat/setting/type';
import type { getResDataQuery } from '@/pages/api/core/chat/getResData';
import type {
@@ -34,6 +38,11 @@ import type {
GetCollectionQuoteProps,
GetCollectionQuoteRes
} from '@/pages/api/core/chat/quote/getCollectionQuote';
+import type {
+ ChatFavouriteAppUpdateParams,
+ ChatFavouriteAppSchema,
+ ChatFavouriteApp
+} from '@fastgpt/global/core/chat/favouriteApp/type';
/**
* Get the initial chat content
@@ -113,9 +122,29 @@ export const getCollectionQuote = (data: GetCollectionQuoteProps) =>
/*---------- chat setting ------------*/
export const getChatSetting = () => {
- return GET('/proApi/core/chat/setting/detail');
+ return GET('/proApi/core/chat/setting/detail');
};
export const updateChatSetting = (data: ChatSettingUpdateParams) => {
return POST('/proApi/core/chat/setting/update', data);
};
+
+export const getFavouriteApps = (data?: { name?: string; tag?: string }) => {
+ return GET('/proApi/core/chat/setting/favourite/list', data);
+};
+
+export const updateFavouriteApps = (data: ChatFavouriteAppUpdateParams[]) => {
+ return POST('/proApi/core/chat/setting/favourite/update', data);
+};
+
+export const updateFavouriteAppOrder = (data: { id: string; order: number }[]) => {
+ return PUT('/proApi/core/chat/setting/favourite/order', data);
+};
+
+export const updateFavouriteAppTags = (data: { id: string; tags: string[] }[]) => {
+ return PUT('/proApi/core/chat/setting/favourite/tags', data);
+};
+
+export const deleteFavouriteApp = (id: string) => {
+ return DELETE(`/proApi/core/chat/setting/favourite/delete?id=${id}`);
+};
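Illustrative usage of the new favourite-app client helpers added above. The returned item shape is assumed to expose a Mongo _id (as the rest of this change does), and the filter and tag values are made up:

import {
  getFavouriteApps,
  updateFavouriteAppOrder,
  updateFavouriteAppTags,
  deleteFavouriteApp
} from '@/web/core/chat/api';

export const reorderFavourites = async () => {
  // Optionally filter by name and/or tag, then persist a new display order.
  const favourites = (await getFavouriteApps({ name: 'faq' })) as { _id: string }[];

  await updateFavouriteAppOrder(favourites.map((app, index) => ({ id: app._id, order: index })));

  // Tagging and deletion follow the same id-based pattern.
  await updateFavouriteAppTags([{ id: favourites[0]._id, tags: ['support'] }]);
  await deleteFavouriteApp(favourites[0]._id);
};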
diff --git a/projects/app/src/web/core/chat/context/chatRecordContext.tsx b/projects/app/src/web/core/chat/context/chatRecordContext.tsx
index 1778899f1aa6..fc932e72059e 100644
--- a/projects/app/src/web/core/chat/context/chatRecordContext.tsx
+++ b/projects/app/src/web/core/chat/context/chatRecordContext.tsx
@@ -11,6 +11,7 @@ import { getNanoid } from '@fastgpt/global/common/string/tools';
import { type BoxProps } from '@chakra-ui/react';
type ChatRecordContextType = {
+ isLoadingRecords: boolean;
chatRecords: ChatSiteItemType[];
setChatRecords: React.Dispatch>;
isChatRecordsLoaded: boolean;
@@ -25,6 +26,7 @@ type ChatRecordContextType = {
};
export const ChatRecordContext = createContext({
+ isLoadingRecords: false,
chatRecords: [],
setChatRecords: function (value: React.SetStateAction): void {
throw new Error('Function not implemented.');
@@ -60,7 +62,8 @@ const ChatRecordContextProvider = ({
data: chatRecords,
ScrollData,
setData: setChatRecords,
- total: totalRecordsCount
+ total: totalRecordsCount,
+ isLoading
} = useScrollPagination(
async (data: getPaginationRecordsBody): Promise> => {
setIsChatRecordsLoaded(false);
@@ -100,13 +103,14 @@ const ChatRecordContextProvider = ({
const contextValue = useMemo(() => {
return {
+ isLoadingRecords: isLoading,
chatRecords,
setChatRecords,
totalRecordsCount,
ScrollData,
isChatRecordsLoaded
};
- }, [ScrollData, chatRecords, setChatRecords, totalRecordsCount, isChatRecordsLoaded]);
+ }, [isLoading, chatRecords, setChatRecords, totalRecordsCount, ScrollData, isChatRecordsLoaded]);
return {children};
};
diff --git a/projects/app/src/web/core/chat/context/chatSettingContext.tsx b/projects/app/src/web/core/chat/context/chatSettingContext.tsx
index 7b6493c0f537..820210f09a66 100644
--- a/projects/app/src/web/core/chat/context/chatSettingContext.tsx
+++ b/projects/app/src/web/core/chat/context/chatSettingContext.tsx
@@ -1,4 +1,5 @@
import { useSystemStore } from '@/web/common/system/useSystemStore';
+import type { ChatSettingTabOptionEnum } from '@/pageComponents/chat/constants';
import {
ChatSidebarPaneEnum,
defaultCollapseStatus,
@@ -6,21 +7,25 @@ import {
} from '@/pageComponents/chat/constants';
import { getChatSetting } from '@/web/core/chat/api';
import { useChatStore } from '@/web/core/chat/context/useChatStore';
-import type { ChatSettingSchema } from '@fastgpt/global/core/chat/setting/type';
+import type {
+ ChatSettingReturnType,
+ ChatSettingSchema
+} from '@fastgpt/global/core/chat/setting/type';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useRouter } from 'next/router';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { createContext } from 'use-context-selector';
-import { usePathname } from 'next/navigation';
-
-type ChatSettingReturnType = ChatSettingSchema | undefined;
export type ChatSettingContextValue = {
pane: ChatSidebarPaneEnum;
- handlePaneChange: (pane: ChatSidebarPaneEnum, _id?: string) => void;
+ handlePaneChange: (
+ pane: ChatSidebarPaneEnum,
+ _id?: string,
+ _tab?: ChatSettingTabOptionEnum
+ ) => void;
collapse: CollapseStatusType;
onTriggerCollapse: () => void;
- chatSettings: ChatSettingSchema | undefined;
+ chatSettings?: ChatSettingReturnType;
refreshChatSetting: () => Promise;
logos: Pick;
};
@@ -42,7 +47,6 @@ export const ChatSettingContext = createContext({
export const ChatSettingContextProvider = ({ children }: { children: React.ReactNode }) => {
const router = useRouter();
- const pathname = usePathname();
const { feConfigs } = useSystemStore();
const { appId, setLastPane, setLastChatAppId, lastPane } = useChatStore();
@@ -60,11 +64,14 @@ export const ChatSettingContextProvider = ({ children }: { children: React.React
{
manual: false,
refreshDeps: [feConfigs.isPlus],
- onSuccess(data) {
+ onSuccess: (data) => {
if (!data) return;
- // Reset home page appId
- if (pane === ChatSidebarPaneEnum.HOME && appId !== data.appId) {
+ if (
+ pane === ChatSidebarPaneEnum.HOME &&
+ appId !== data.appId &&
+ data.quickAppList.every((q) => q._id !== appId)
+ ) {
handlePaneChange(ChatSidebarPaneEnum.HOME, data.appId);
}
}
@@ -72,8 +79,8 @@ export const ChatSettingContextProvider = ({ children }: { children: React.React
);
const handlePaneChange = useCallback(
- async (newPane: ChatSidebarPaneEnum, id?: string) => {
- if (newPane === pane && !id) return;
+ async (newPane: ChatSidebarPaneEnum, id?: string, tab?: ChatSettingTabOptionEnum) => {
+ if (newPane === pane && !id && !tab) return;
const _id = (() => {
if (id) return id;
@@ -90,7 +97,8 @@ export const ChatSettingContextProvider = ({ children }: { children: React.React
query: {
...router.query,
appId: _id,
- pane: newPane
+ pane: newPane,
+ tab
}
});
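handlePaneChange now takes an optional third argument, the settings tab, which is written into the router query next to appId and pane. A short usage sketch mirroring the ChatSetting header above:

// In a consumer component with access to the ChatSettingContext:
const handleTabChange = (tab: ChatSettingTabOptionEnum) =>
  handlePaneChange(ChatSidebarPaneEnum.SETTING, undefined, tab);

// Deep-link straight to log details; the early-return guard no longer skips
// this case, because it also checks whether a tab was passed.
handleTabChange(ChatSettingTabOptionEnum.LOG_DETAILS);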
diff --git a/projects/app/src/web/core/chat/context/useChatStore.ts b/projects/app/src/web/core/chat/context/useChatStore.ts
index dba5e90592ae..de2ed5ce5209 100644
--- a/projects/app/src/web/core/chat/context/useChatStore.ts
+++ b/projects/app/src/web/core/chat/context/useChatStore.ts
@@ -186,6 +186,7 @@ const createStorageListener = (store: any) => {
return () => {};
};
+
// Initialize the storage event listener
if (typeof window !== 'undefined') {
createStorageListener(useChatStore);
diff --git a/scripts/openapi/package.json b/scripts/openapi/package.json
index 92ad992ccc14..4c77b65f4947 100644
--- a/scripts/openapi/package.json
+++ b/scripts/openapi/package.json
@@ -10,7 +10,7 @@
"@types/babel__traverse": "^7.20.6"
},
"peerDependencies": {
- "typescript": "^5.0.0"
+ "typescript": "^5.1.3"
},
"dependencies": {
"@babel/generator": "^7.25.6",
diff --git a/test/cases/service/core/ai/llm/toolCall.test.ts b/test/cases/service/core/ai/llm/toolCall.test.ts
new file mode 100644
index 000000000000..60d7bc1a036c
--- /dev/null
+++ b/test/cases/service/core/ai/llm/toolCall.test.ts
@@ -0,0 +1,935 @@
+import {
+ parsePromptToolCall,
+ promptToolCallMessageRewrite
+} from '@fastgpt/service/core/ai/llm/promptToolCall';
+import type { ChatCompletionMessageParam, ChatCompletionTool } from '@fastgpt/global/core/ai/type';
+import { describe, expect, it } from 'vitest';
+
+describe('parsePromptToolCall function tests', () => {
+ describe('Basic scenarios', () => {
+ it('should return answer when input starts with 0:', () => {
+ const input = '0: This is a regular response';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'This is a regular response'
+ });
+ });
+
+ it('should return answer when input starts with 0:(Chinese colon)', () => {
+ const input = '0:This is a regular response with Chinese colon';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'This is a regular response with Chinese colon'
+ });
+ });
+
+ it('should return trimmed answer when input starts with 0: and has extra whitespace', () => {
+ const input = ' 0: This is a response with whitespace ';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'This is a response with whitespace'
+ });
+ });
+
+ it('should handle 0: in the middle of string when within first 6 characters', () => {
+ const input = 'Pre 0: This is the actual response';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'This is the actual response'
+ });
+ });
+
+ it('should not process 0: when beyond first 6 characters', () => {
+ const input = 'Long prefix 0: This should not be processed';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'Long prefix 0: This should not be processed'
+ });
+ });
+
+ it('should return original string when no 0: prefix found and no tool call', () => {
+ const input = 'This is just a regular string without any prefixes';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'This is just a regular string without any prefixes'
+ });
+ });
+
+ it('should parse valid tool call with 1:', () => {
+ const input = '1: {"name": "get_weather", "arguments": {"location": "Tokyo"}}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('get_weather');
+ expect(result.toolCalls![0].function.arguments).toBe('{"location":"Tokyo"}');
+ expect(result.toolCalls![0].type).toBe('function');
+ expect(result.toolCalls![0].id).toBeDefined();
+ expect(typeof result.toolCalls![0].id).toBe('string');
+ });
+
+ it('should parse valid tool call with 1:(Chinese colon)', () => {
+ const input = '1:{"name": "calculate", "arguments": {"expression": "2+2"}}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('calculate');
+ expect(result.toolCalls![0].function.arguments).toBe('{"expression":"2+2"}');
+ });
+ });
+
+ describe('Tool call parsing', () => {
+ it('should handle tool call with nested object arguments', () => {
+ const input =
+ '1: {"name": "complex_tool", "arguments": {"user": {"name": "John", "age": 30}, "settings": {"verbose": true}}}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('complex_tool');
+ expect(JSON.parse(result.toolCalls![0].function.arguments)).toEqual({
+ user: { name: 'John', age: 30 },
+ settings: { verbose: true }
+ });
+ });
+
+ it('should handle tool call with array arguments', () => {
+ const input =
+ '1: {"name": "process_list", "arguments": {"items": [1, 2, 3], "options": ["sort", "filter"]}}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('process_list');
+ expect(JSON.parse(result.toolCalls![0].function.arguments)).toEqual({
+ items: [1, 2, 3],
+ options: ['sort', 'filter']
+ });
+ });
+
+ it('should handle tool call with empty arguments', () => {
+ const input = '1: {"name": "simple_tool", "arguments": {}}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('simple_tool');
+ expect(result.toolCalls![0].function.arguments).toBe('{}');
+ });
+
+ it('should handle tool call with extra content before and after JSON', () => {
+ const input =
+ 'Some text 1: extra {"name": "test_tool", "arguments": {"param": "value"}} more text';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('test_tool');
+ expect(result.toolCalls![0].function.arguments).toBe('{"param":"value"}');
+ });
+ });
+
+ describe('Edge cases and error handling', () => {
+ it('should return error message for malformed JSON with 1:', () => {
+ const input = '1: {"name": "tool", "arguments": invalid json}';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'Tool run error'
+ });
+ });
+
+ it('should return error message for incomplete JSON with 1:', () => {
+ const input = '1: {"name": "tool"';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'Tool run error'
+ });
+ });
+
+ it('should handle empty JSON object with 1: (creates tool call with undefined properties)', () => {
+ const input = '1: {}';
+ const result = parsePromptToolCall(input);
+
+ // An empty object {} has no name property, so it parses but yields an invalid tool call
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBeUndefined();
+ });
+
+ it('should handle empty string input', () => {
+ const input = '';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: ''
+ });
+ });
+
+ it('should handle whitespace-only input', () => {
+ const input = ' \n\t ';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: ''
+ });
+ });
+
+ it('should handle input with only prefix', () => {
+ const input = '1:';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'Tool run error'
+ });
+ });
+
+ it('should handle input with only prefix and whitespace', () => {
+ const input = '1: ';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'Tool run error'
+ });
+ });
+
+ it('should handle JSON5 syntax in tool call', () => {
+ const input = "1: {name: 'test_tool', arguments: {param: 'value', number: 42}}";
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('test_tool');
+ expect(JSON.parse(result.toolCalls![0].function.arguments)).toEqual({
+ param: 'value',
+ number: 42
+ });
+ });
+
+ it('should handle tool call with simple strings (no escaping needed)', () => {
+ const input =
+ '1: {"name": "search", "arguments": {"query": "Hello world", "filter": "type:document"}}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('search');
+ expect(JSON.parse(result.toolCalls![0].function.arguments)).toEqual({
+ query: 'Hello world',
+ filter: 'type:document'
+ });
+ });
+
+ it('should handle input with multiple 0: occurrences - does not process if first one is beyond position 5', () => {
+ const input = 'First 0: Second part 0: Third part';
+ const result = parsePromptToolCall(input);
+
+ // The first '0:' is at position 6, which is > 5, so it's not processed
+ expect(result).toEqual({
+ answer: 'First 0: Second part 0: Third part'
+ });
+ });
+
+ it('should handle input with multiple 1: occurrences - fails to parse when extra text interferes', () => {
+ const input =
+ 'Text 1: {"name": "tool1", "arguments": {"param": "value"}} more text 1: {"name": "tool2", "arguments": {}}';
+ const result = parsePromptToolCall(input);
+
+ // The sliceJsonStr function can't properly extract JSON when there's extra text after
+ expect(result).toEqual({
+ answer: 'Tool run error'
+ });
+ });
+
+ it('should handle tool name with underscores and numbers', () => {
+ const input = '1: {"name": "get_user_data_v2", "arguments": {"user_id": 123}}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('get_user_data_v2');
+ expect(JSON.parse(result.toolCalls![0].function.arguments)).toEqual({
+ user_id: 123
+ });
+ });
+
+ it('should handle very long strings', () => {
+ const longString = 'A'.repeat(10000);
+ const input = `0: ${longString}`;
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: longString
+ });
+ });
+
+ it('should handle Unicode characters in tool arguments', () => {
+ const input =
+ '1: {"name": "translate", "arguments": {"text": "你好世界", "from": "zh", "to": "en"}}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('translate');
+ expect(JSON.parse(result.toolCalls![0].function.arguments)).toEqual({
+ text: '你好世界',
+ from: 'zh',
+ to: 'en'
+ });
+ });
+
+ it('should handle mixed Chinese and English colons', () => {
+ const input1 = '0: Answer with English colon';
+ const input2 = '0:Answer with Chinese colon';
+ const input3 = '1: {"name": "tool", "arguments": {"key": "value"}}';
+ const input4 = '1:{"name": "tool", "arguments": {"key": "value"}}';
+
+ const result1 = parsePromptToolCall(input1);
+ const result2 = parsePromptToolCall(input2);
+ const result3 = parsePromptToolCall(input3);
+ const result4 = parsePromptToolCall(input4);
+
+ expect(result1.answer).toBe('Answer with English colon');
+ expect(result2.answer).toBe('Answer with Chinese colon');
+ expect(result3.toolCalls).toHaveLength(1);
+ expect(result4.toolCalls).toHaveLength(1);
+ });
+ });
+
+ describe('Boundary conditions', () => {
+ it('should handle input with only numbers', () => {
+ const input = '12345';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: '12345'
+ });
+ });
+
+ it('should handle tool call with null arguments', () => {
+ const input = '1: {"name": "null_test", "arguments": null}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('null_test');
+ expect(result.toolCalls![0].function.arguments).toBe('null');
+ });
+
+ it('should handle tool call with boolean and number values', () => {
+ const input =
+ '1: {"name": "mixed_types", "arguments": {"flag": true, "count": 0, "ratio": 3.14}}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('mixed_types');
+ expect(JSON.parse(result.toolCalls![0].function.arguments)).toEqual({
+ flag: true,
+ count: 0,
+ ratio: 3.14
+ });
+ });
+
+ it('should handle newlines in input - 0: beyond position limit', () => {
+ const input = 'Line 1\n0: Line 2\nLine 3';
+ const result = parsePromptToolCall(input);
+
+ // The '0:' appears after position 6, so it's not processed
+ expect(result).toEqual({
+ answer: 'Line 1\n0: Line 2\nLine 3'
+ });
+ });
+
+ it('should handle tabs and special whitespace', () => {
+ const input = '\t0:\tThis\thas\ttabs\t';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'This\thas\ttabs'
+ });
+ });
+
+ it('should not process 0: when it appears after position 5', () => {
+ const input = 'Longer prefix 0: This should not be processed';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'Longer prefix 0: This should not be processed'
+ });
+ });
+
+ it('should handle 0: at exactly position 5', () => {
+ const input = '12345 0: Should not be processed';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: '12345 0: Should not be processed'
+ });
+ });
+
+ it('should handle Chinese colon priority (only when English colon not found)', () => {
+ const input = '0:Chinese colon without English';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'Chinese colon without English'
+ });
+ });
+
+ it('should prioritize English colon over Chinese colon - but not when beyond position limit', () => {
+ const input = '0: Chinese 0: English colon';
+ const result = parsePromptToolCall(input);
+
+ // The English '0:' is at position 11, beyond the limit, so returns original string
+ expect(result).toEqual({
+ answer: '0: Chinese 0: English colon'
+ });
+ });
+
+ it('should handle valid 0: within newline constraints', () => {
+ const input = '0: Line with proper prefix';
+ const result = parsePromptToolCall(input);
+
+ expect(result).toEqual({
+ answer: 'Line with proper prefix'
+ });
+ });
+
+ it('should handle simple 1: tool call that works', () => {
+ const input = '1: {"name": "tool1", "arguments": {"param": "value"}}';
+ const result = parsePromptToolCall(input);
+
+ expect(result.answer).toBe('');
+ expect(result.toolCalls).toHaveLength(1);
+ expect(result.toolCalls![0].function.name).toBe('tool1');
+ expect(JSON.parse(result.toolCalls![0].function.arguments)).toEqual({
+ param: 'value'
+ });
+ });
+ });
+});
+
+describe('promptToolCallMessageRewrite function tests', () => {
+ describe('System message handling', () => {
+ it('should add system message when none exists', () => {
+ const messages: ChatCompletionMessageParam[] = [{ role: 'user', content: 'Hello' }];
+ const tools: ChatCompletionTool[] = [
+ {
+ type: 'function',
+ function: {
+ name: 'get_weather',
+ description: 'Get weather info',
+ parameters: { type: 'object', properties: {} }
+ }
+ }
+ ];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result).toHaveLength(2);
+ expect(result[0].role).toBe('system');
+ expect(result[0].content).toContain('你是一个智能机器人');
+ expect(result[0].content).toContain('get_weather');
+ expect(result[1]).toEqual({ role: 'user', content: 'Hello' });
+ });
+
+ it('should update existing string system message', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'system', content: 'You are helpful' },
+ { role: 'user', content: 'Hello' }
+ ];
+ const tools: ChatCompletionTool[] = [
+ {
+ type: 'function',
+ function: {
+ name: 'calculator',
+ description: 'Calculate math',
+ parameters: { type: 'object', properties: {} }
+ }
+ }
+ ];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result).toHaveLength(2);
+ expect(result[0].role).toBe('system');
+ expect(result[0].content).toContain('You are helpful');
+ expect(result[0].content).toContain('你是一个智能机器人');
+ expect(result[0].content).toContain('calculator');
+ });
+
+ it('should update existing array system message', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: 'system',
+ content: [{ type: 'text', text: 'You are helpful' }]
+ },
+ { role: 'user', content: 'Hello' }
+ ];
+ const tools: ChatCompletionTool[] = [
+ {
+ type: 'function',
+ function: {
+ name: 'search',
+ description: 'Search tool',
+ parameters: { type: 'object', properties: {} }
+ }
+ }
+ ];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result).toHaveLength(2);
+ expect(result[0].role).toBe('system');
+ expect(Array.isArray(result[0].content)).toBe(true);
+ const content = result[0].content as Array<{ type: string; text: string }>;
+ expect(content).toHaveLength(2);
+ expect(content[0]).toEqual({ type: 'text', text: 'You are helpful' });
+ expect(content[1].type).toBe('text');
+ expect(content[1].text).toContain('你是一个智能机器人');
+ expect(content[1].text).toContain('search');
+ });
+
+ it('should throw error for invalid system message content', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'system', content: null as any },
+ { role: 'user', content: 'Hello' }
+ ];
+ const tools: ChatCompletionTool[] = [
+ {
+ type: 'function',
+ function: {
+ name: 'test_tool',
+ description: 'Test',
+ parameters: { type: 'object', properties: {} }
+ }
+ }
+ ];
+
+ expect(() => promptToolCallMessageRewrite(messages, tools)).toThrow(
+ 'Prompt call invalid input'
+ );
+ });
+
+ it('should handle multiple tools in system message', () => {
+ const messages: ChatCompletionMessageParam[] = [{ role: 'user', content: 'Hello' }];
+ const tools: ChatCompletionTool[] = [
+ {
+ type: 'function',
+ function: {
+ name: 'tool1',
+ description: 'First tool',
+ parameters: { type: 'object', properties: { param1: { type: 'string' } } }
+ }
+ },
+ {
+ type: 'function',
+ function: {
+ name: 'tool2',
+ description: 'Second tool',
+ parameters: { type: 'object', properties: { param2: { type: 'number' } } }
+ }
+ }
+ ];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result[0].content).toContain('tool1');
+ expect(result[0].content).toContain('tool2');
+ expect(result[0].content).toContain('First tool');
+ expect(result[0].content).toContain('Second tool');
+ });
+ });
+
+ describe('Assistant message rewriting', () => {
+ it('should rewrite assistant message with string content', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'Hello' },
+ { role: 'assistant', content: 'Hi there!' }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result[2].role).toBe('assistant');
+ expect(result[2].content).toBe('0: Hi there!');
+ });
+
+ it('should rewrite assistant message with tool calls', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'What is the weather?' },
+ {
+ role: 'assistant',
+ content: null,
+ tool_calls: [
+ {
+ id: 'call_123',
+ type: 'function',
+ function: {
+ name: 'get_weather',
+ arguments: '{"location": "Tokyo"}'
+ }
+ }
+ ]
+ }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result[2].role).toBe('assistant');
+ expect(result[2].content).toBe(
+ '1: {"name":"get_weather","arguments":"{\\"location\\": \\"Tokyo\\"}"}'
+ );
+ expect(result[2]).not.toHaveProperty('tool_calls');
+ });
+
+ it('should skip assistant message with no content and no tool calls', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'Hello' },
+ { role: 'assistant', content: null }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result[2].role).toBe('assistant');
+ expect(result[2].content).toBeNull();
+ });
+
+ it('should handle assistant message with multiple tool calls (only first one used)', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'Hello' },
+ {
+ role: 'assistant',
+ content: null,
+ tool_calls: [
+ {
+ id: 'call_1',
+ type: 'function',
+ function: { name: 'tool1', arguments: '{"param": "value1"}' }
+ },
+ {
+ id: 'call_2',
+ type: 'function',
+ function: { name: 'tool2', arguments: '{"param": "value2"}' }
+ }
+ ]
+ }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result[2].content).toBe(
+ '1: {"name":"tool1","arguments":"{\\"param\\": \\"value1\\"}"}'
+ );
+ expect(result[2]).not.toHaveProperty('tool_calls');
+ });
+ });
+
+ describe('Tool message rewriting', () => {
+ it('should convert tool message to user message', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'What is the weather?' },
+ {
+ role: 'tool',
+ tool_call_id: 'call_123',
+ content: 'The weather is sunny'
+ }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result[2].role).toBe('user');
+ expect(result[2].content).toBe('\nThe weather is sunny\n');
+ expect(result[2]).not.toHaveProperty('tool_call_id');
+ });
+
+ it('should handle multiple tool messages', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'Hello' },
+ {
+ role: 'tool',
+ tool_call_id: 'call_1',
+ content: 'Result 1'
+ },
+ {
+ role: 'tool',
+ tool_call_id: 'call_2',
+ content: 'Result 2'
+ }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result[2].role).toBe('user');
+ expect(result[2].content).toBe('\nResult 1\n');
+ expect(result[3].role).toBe('user');
+ expect(result[3].content).toBe('\nResult 2\n');
+ });
+
+ it('should handle tool message with complex content', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'Test' },
+ {
+ role: 'tool',
+ tool_call_id: 'call_123',
+ content: JSON.stringify({ result: 'success', data: [1, 2, 3] })
+ }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result[2].role).toBe('user');
+ expect(result[2].content).toBe(
+ '\n{"result":"success","data":[1,2,3]}\n'
+ );
+ });
+ });
+
+ describe('Message immutability', () => {
+ it('should not mutate original messages', () => {
+ const originalMessages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'Hello' },
+ { role: 'assistant', content: 'Hi there!' },
+ {
+ role: 'tool',
+ tool_call_id: 'call_123',
+ content: 'Tool result'
+ }
+ ];
+ const tools: ChatCompletionTool[] = [
+ {
+ type: 'function',
+ function: {
+ name: 'test_tool',
+ description: 'Test',
+ parameters: { type: 'object', properties: {} }
+ }
+ }
+ ];
+
+ const originalMessagesCopy = JSON.parse(JSON.stringify(originalMessages));
+ promptToolCallMessageRewrite(originalMessages, tools);
+
+ expect(originalMessages).toEqual(originalMessagesCopy);
+ });
+
+ it('should handle deeply nested message content without mutation', () => {
+ const originalMessages: ChatCompletionMessageParam[] = [
+ {
+ role: 'system',
+ content: [{ type: 'text', text: 'Original system message' }]
+ },
+ { role: 'user', content: 'Hello' }
+ ];
+ const tools: ChatCompletionTool[] = [
+ {
+ type: 'function',
+ function: {
+ name: 'test_tool',
+ description: 'Test',
+ parameters: { type: 'object', properties: {} }
+ }
+ }
+ ];
+
+ const originalMessagesCopy = JSON.parse(JSON.stringify(originalMessages));
+ promptToolCallMessageRewrite(originalMessages, tools);
+
+ expect(originalMessages).toEqual(originalMessagesCopy);
+ });
+ });
+
+ describe('Complex conversation flows', () => {
+ it('should handle complete conversation with all message types', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'system', content: 'You are helpful' },
+ { role: 'user', content: 'What is the weather in Tokyo?' },
+ {
+ role: 'assistant',
+ content: null,
+ tool_calls: [
+ {
+ id: 'call_123',
+ type: 'function',
+ function: {
+ name: 'get_weather',
+ arguments: '{"location": "Tokyo"}'
+ }
+ }
+ ]
+ },
+ {
+ role: 'tool',
+ tool_call_id: 'call_123',
+ content: 'The weather in Tokyo is sunny, 25°C'
+ },
+ { role: 'assistant', content: 'The weather in Tokyo is sunny with a temperature of 25°C.' }
+ ];
+ const tools: ChatCompletionTool[] = [
+ {
+ type: 'function',
+ function: {
+ name: 'get_weather',
+ description: 'Get current weather',
+ parameters: {
+ type: 'object',
+ properties: {
+ location: { type: 'string', description: 'City name' }
+ },
+ required: ['location']
+ }
+ }
+ }
+ ];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result).toHaveLength(5);
+
+ // System message should be updated
+ expect(result[0].role).toBe('system');
+ expect(result[0].content).toContain('You are helpful');
+ expect(result[0].content).toContain('get_weather');
+
+ // User message unchanged
+ expect(result[1]).toEqual({ role: 'user', content: 'What is the weather in Tokyo?' });
+
+ // Assistant with tool call should be rewritten
+ expect(result[2].role).toBe('assistant');
+ expect(result[2].content).toBe(
+ '1: {"name":"get_weather","arguments":"{\\"location\\": \\"Tokyo\\"}"}'
+ );
+ expect(result[2]).not.toHaveProperty('tool_calls');
+
+ // Tool message should become user message
+ expect(result[3].role).toBe('user');
+ expect(result[3].content).toBe(
+ '\nThe weather in Tokyo is sunny, 25°C\n'
+ );
+
+ // Final assistant message should be prefixed
+ expect(result[4].role).toBe('assistant');
+ expect(result[4].content).toBe(
+ '0: The weather in Tokyo is sunny with a temperature of 25°C.'
+ );
+ });
+
+ it('should handle empty messages array', () => {
+ const messages: ChatCompletionMessageParam[] = [];
+ const tools: ChatCompletionTool[] = [
+ {
+ type: 'function',
+ function: {
+ name: 'test_tool',
+ description: 'Test',
+ parameters: { type: 'object', properties: {} }
+ }
+ }
+ ];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].role).toBe('system');
+ expect(result[0].content).toContain('你是一个智能机器人');
+ expect(result[0].content).toContain('test_tool');
+ });
+
+ it('should handle empty tools array', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'Hello' },
+ { role: 'assistant', content: 'Hi there!' }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result).toHaveLength(3);
+ expect(result[0].role).toBe('system');
+ expect(result[0].content).toContain('你是一个智能机器人');
+ expect(result[0].content).toContain('[]'); // Empty tools array in JSON
+ expect(result[2].content).toBe('0: Hi there!');
+ });
+ });
+
+ describe('Edge cases', () => {
+ it('should handle assistant message with empty string content', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'Hello' },
+ { role: 'assistant', content: '' }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result[2].role).toBe('assistant');
+ expect(result[2].content).toBe(''); // Empty string is falsy, so not processed
+ });
+
+ it('should handle tool message with empty content', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'Hello' },
+ {
+ role: 'tool',
+ tool_call_id: 'call_123',
+ content: ''
+ }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result[2].role).toBe('user');
+ expect(result[2].content).toBe('\n\n');
+ });
+
+ it('should handle mixed message types in sequence', () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'user', content: 'Hello' },
+ { role: 'assistant', content: 'Hi!' },
+ { role: 'user', content: 'How are you?' },
+ {
+ role: 'assistant',
+ content: null,
+ tool_calls: [
+ {
+ id: 'call_1',
+ type: 'function',
+ function: { name: 'check_status', arguments: '{}' }
+ }
+ ]
+ },
+ {
+ role: 'tool',
+ tool_call_id: 'call_1',
+ content: 'Status: OK'
+ }
+ ];
+ const tools: ChatCompletionTool[] = [];
+
+ const result = promptToolCallMessageRewrite(messages, tools);
+
+ expect(result).toHaveLength(6); // system + 5 original
+ expect(result[1]).toEqual({ role: 'user', content: 'Hello' });
+ expect(result[2].content).toBe('0: Hi!');
+ expect(result[3]).toEqual({ role: 'user', content: 'How are you?' });
+ expect(result[4].content).toBe('1: {"name":"check_status","arguments":"{}"}');
+ expect(result[5].content).toBe('\nStatus: OK\n');
+ });
+ });
+});
diff --git a/test/cases/service/core/ai/llm/utils.test.ts b/test/cases/service/core/ai/llm/utils.test.ts
new file mode 100644
index 000000000000..858f428bc23f
--- /dev/null
+++ b/test/cases/service/core/ai/llm/utils.test.ts
@@ -0,0 +1,862 @@
+import {
+ loadRequestMessages,
+ filterGPTMessageByMaxContext
+} from '@fastgpt/service/core/ai/llm/utils';
+import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
+import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
+import { describe, expect, it, vi, beforeEach } from 'vitest';
+
+// Mock external dependencies
+vi.mock('@fastgpt/service/common/string/tiktoken/index', () => ({
+ countGptMessagesTokens: vi.fn()
+}));
+
+vi.mock('@fastgpt/service/common/file/image/utils', () => ({
+ getImageBase64: vi.fn()
+}));
+
+vi.mock('@fastgpt/web/i18n/utils', () => ({
+ i18nT: vi.fn((key: string) => key)
+}));
+
+vi.mock('@fastgpt/service/common/system/log', () => ({
+ addLog: {
+ info: vi.fn(),
+ warn: vi.fn()
+ }
+}));
+
+vi.mock('axios', () => ({
+ default: {
+ head: vi.fn()
+ }
+}));
+
+import { countGptMessagesTokens } from '@fastgpt/service/common/string/tiktoken/index';
+import { getImageBase64 } from '@fastgpt/service/common/file/image/utils';
+import { addLog } from '@fastgpt/service/common/system/log';
+
+// @ts-ignore
+import axios from 'axios';
+
+const mockCountGptMessagesTokens = vi.mocked(countGptMessagesTokens);
+const mockGetImageBase64 = vi.mocked(getImageBase64);
+const mockAxiosHead = vi.mocked(axios.head);
+
+describe('filterGPTMessageByMaxContext function tests', () => {
+ beforeEach(() => {
+ vi.clearAllMocks();
+ mockCountGptMessagesTokens.mockResolvedValue(10);
+ });
+
+ describe('Basic filtering scenarios', () => {
+ it('should return empty array for invalid input', async () => {
+ const result = await filterGPTMessageByMaxContext({
+ messages: null as any,
+ maxContext: 1000
+ });
+ expect(result).toEqual([]);
+
+ const result2 = await filterGPTMessageByMaxContext({
+ messages: undefined as any,
+ maxContext: 1000
+ });
+ expect(result2).toEqual([]);
+ });
+
+ it('should return messages unchanged when less than 4 messages', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'You are helpful' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Hi there!' }
+ ];
+
+ const result = await filterGPTMessageByMaxContext({
+ messages,
+ maxContext: 1000
+ });
+
+ expect(result).toEqual(messages);
+ });
+
+ it('should return only system prompts when no chat prompts exist', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System prompt 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System prompt 2' }
+ ];
+
+ const result = await filterGPTMessageByMaxContext({
+ messages,
+ maxContext: 1000
+ });
+
+ expect(result).toEqual(messages);
+ });
+ });
+
+ describe('System and chat prompt separation', () => {
+ it('should correctly separate system prompts from chat prompts', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System 2' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Assistant 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User 2' }
+ ];
+
+ mockCountGptMessagesTokens
+ .mockResolvedValueOnce(20) // system prompts
+ .mockResolvedValueOnce(30) // user 2
+ .mockResolvedValueOnce(25) // assistant 1 + user 1
+ .mockResolvedValueOnce(15); // user 1
+
+ const result = await filterGPTMessageByMaxContext({
+ messages,
+ maxContext: 1000
+ });
+
+ expect(result).toHaveLength(5);
+ expect(
+ result.slice(0, 2).every((msg) => msg.role === ChatCompletionRequestMessageRoleEnum.System)
+ ).toBe(true);
+ expect(
+ result.slice(2).every((msg) => msg.role !== ChatCompletionRequestMessageRoleEnum.System)
+ ).toBe(true);
+ });
+ });
+
+ describe('Context limiting behavior', () => {
+ it('should filter out messages when context limit is exceeded', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Assistant 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User 2' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Assistant 2' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User 3' }
+ ];
+
+ mockCountGptMessagesTokens
+ .mockResolvedValueOnce(50) // system prompts
+ .mockResolvedValueOnce(60) // user 3 (exceeds remaining context)
+ .mockResolvedValueOnce(40); // assistant 2 + user 2
+
+ const result = await filterGPTMessageByMaxContext({
+ messages,
+ maxContext: 100
+ });
+
+ // Should keep system + the most recent round, which is always preserved even if it exceeds the remaining context
+ expect(result).toHaveLength(2);
+ expect(result[0].role).toBe(ChatCompletionRequestMessageRoleEnum.System);
+ expect(result[1].content).toBe('User 3');
+ });
+
+ it('should preserve at least one conversation round even if it exceeds context', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Large user message' },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Assistant,
+ content: 'Large assistant response'
+ }
+ ];
+
+ mockCountGptMessagesTokens
+ .mockResolvedValueOnce(20) // system prompts
+ .mockResolvedValueOnce(200); // user + assistant (exceeds remaining context)
+
+ const result = await filterGPTMessageByMaxContext({
+ messages,
+ maxContext: 50
+ });
+
+ // Should still keep the conversation even though it exceeds context
+ expect(result).toHaveLength(3);
+ expect(result[1].content).toBe('Large user message');
+ expect(result[2].content).toBe('Large assistant response');
+ });
+ });
+
+ describe('Complex conversation patterns', () => {
+ it('should handle user-assistant-tool conversation pattern', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Assistant 1' },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Tool,
+ tool_call_id: 'call1',
+ content: 'Tool 1'
+ },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User 2' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Assistant 2' },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Tool,
+ tool_call_id: 'call2',
+ content: 'Tool 2'
+ },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Tool,
+ tool_call_id: 'call3',
+ content: 'Tool 3'
+ }
+ ];
+
+ mockCountGptMessagesTokens
+ .mockResolvedValueOnce(20) // system
+ .mockResolvedValueOnce(50) // last group: assistant 2 + tool 2 + tool 3 + user 2
+ .mockResolvedValueOnce(40); // previous group: assistant 1 + tool 1 + user 1
+
+ const result = await filterGPTMessageByMaxContext({
+ messages,
+ maxContext: 1000
+ });
+
+ expect(result).toHaveLength(8);
+ expect(result[0].role).toBe(ChatCompletionRequestMessageRoleEnum.System);
+ });
+
+ it('should handle multiple assistant messages in sequence', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Assistant 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Assistant 2' },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Tool,
+ tool_call_id: 'call1',
+ content: 'Tool result'
+ },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User 2' }
+ ];
+
+ mockCountGptMessagesTokens
+ .mockResolvedValueOnce(30) // user 2
+ .mockResolvedValueOnce(60); // assistant 1 + assistant 2 + tool + user 1
+
+ const result = await filterGPTMessageByMaxContext({
+ messages,
+ maxContext: 1000
+ });
+
+ expect(result).toHaveLength(5);
+ });
+ });
+
+ describe('Edge cases', () => {
+ it('should handle empty messages array', async () => {
+ const result = await filterGPTMessageByMaxContext({
+ messages: [],
+ maxContext: 1000
+ });
+
+ expect(result).toEqual([]);
+ });
+
+ it('should handle zero maxContext', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User' }
+ ];
+
+ mockCountGptMessagesTokens
+ .mockResolvedValueOnce(10) // system
+ .mockResolvedValueOnce(20); // user
+
+ const result = await filterGPTMessageByMaxContext({
+ messages,
+ maxContext: 0
+ });
+
+ // Should still preserve at least one conversation
+ expect(result).toHaveLength(2);
+ });
+
+ it('should handle negative maxContext', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'User' }
+ ];
+
+ mockCountGptMessagesTokens.mockResolvedValueOnce(20);
+
+ const result = await filterGPTMessageByMaxContext({
+ messages,
+ maxContext: -100
+ });
+
+ expect(result).toHaveLength(1);
+ });
+ });
+});
+
+describe('loadRequestMessages function tests', () => {
+ beforeEach(() => {
+ vi.clearAllMocks();
+ mockGetImageBase64.mockResolvedValue({
+ completeBase64: 'data:image/png;base64,test',
+ base64: 'test',
+ mime: 'image/png'
+ });
+ mockAxiosHead.mockResolvedValue({ status: 200 });
+ });
+
+ describe('Basic message processing', () => {
+ it('should reject empty messages array', async () => {
+ await expect(
+ loadRequestMessages({
+ messages: []
+ })
+ ).rejects.toMatch('common:core.chat.error.Messages empty');
+ });
+
+ it('should process simple conversation', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'You are helpful' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Hi there!' }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(3);
+ expect(result[0].role).toBe(ChatCompletionRequestMessageRoleEnum.System);
+ expect(result[0].content).toBe('You are helpful');
+ expect(result[1].role).toBe(ChatCompletionRequestMessageRoleEnum.User);
+ expect(result[1].content).toBe('Hello');
+ expect(result[2].role).toBe(ChatCompletionRequestMessageRoleEnum.Assistant);
+ expect(result[2].content).toBe('Hi there!');
+ });
+ });
+
+ describe('System message processing', () => {
+ it('should handle string system content', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System prompt' }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(1);
+ expect(result[0].content).toBe('System prompt');
+ });
+
+ it('should handle array system content', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.System,
+ content: [
+ { type: 'text', text: 'Part 1' },
+ { type: 'text', text: 'Part 2' }
+ ]
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(1);
+ expect(result[0].content).toBe('Part 1\n\nPart 2');
+ });
+
+ it('should filter out empty text in system content array', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.System,
+ content: [
+ { type: 'text', text: 'Valid text' },
+ { type: 'text', text: '' },
+ { type: 'text', text: 'Another valid text' }
+ ]
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(1);
+ expect(result[0].content).toBe('Valid text\n\nAnother valid text');
+ });
+
+ it('should skip system message with empty content', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: '' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(1);
+ expect(result[0].role).toBe(ChatCompletionRequestMessageRoleEnum.User);
+ });
+ });
+
+ describe('User message processing with vision', () => {
+ it('should process simple text user message', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello world' }
+ ];
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(result).toHaveLength(1);
+ expect(result[0].content).toBe('Hello world');
+ });
+
+ it('should not extract images from short text by default', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.User,
+ content: 'https://example.com/image.png'
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(result).toHaveLength(1);
+ expect(typeof result[0].content).toBe('string');
+ expect(result[0].content).toBe('https://example.com/image.png');
+ });
+
+ it('should not extract images when useVision is false', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.User,
+ content: 'Look at https://example.com/image.png'
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages, useVision: false });
+
+ expect(result).toHaveLength(1);
+ expect(result[0].content).toBe('Look at https://example.com/image.png');
+ });
+
+ it('should not extract images from very long text (>500 chars)', async () => {
+ const longText = 'A'.repeat(600) + ' https://example.com/image.png';
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: longText }
+ ];
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(result).toHaveLength(1);
+ expect(result[0].content).toBe(longText);
+ });
+
+ it('should limit to 4 images and return text if more found', async () => {
+ const textWithManyImages =
+ 'Images: ' +
+ 'https://example.com/1.png ' +
+ 'https://example.com/2.jpg ' +
+ 'https://example.com/3.gif ' +
+ 'https://example.com/4.webp ' +
+ 'https://example.com/5.png';
+
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: textWithManyImages }
+ ];
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(result).toHaveLength(1);
+ expect(result[0].content).toBe(textWithManyImages);
+ });
+
+ it('should handle array content with mixed types', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.User,
+ content: [
+ { type: 'text', text: 'Hello' },
+ { type: 'image_url', image_url: { url: 'https://example.com/image.png' } }
+ ]
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(result).toHaveLength(1);
+ // Once the image is filtered out and only text items remain, the content is flattened to a string
+ expect(typeof result[0].content).toBe('string');
+ expect(result[0].content).toBe('Hello');
+ });
+
+ it('should filter out empty text items from array content', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.User,
+ content: [
+ { type: 'text', text: 'Valid text' },
+ { type: 'text', text: '' },
+ { type: 'text', text: 'Another text' }
+ ]
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(result).toHaveLength(1);
+ const content = result[0].content as any[];
+ expect(content).toHaveLength(2);
+ });
+ });
+
+ describe('Image processing', () => {
+ it('should load local image to base64', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.User,
+ content: [{ type: 'image_url', image_url: { url: '/local/image.png' } }]
+ }
+ ];
+
+ mockGetImageBase64.mockResolvedValue({
+ completeBase64: 'data:image/png;base64,localimage',
+ base64: 'localimage',
+ mime: 'image/png'
+ });
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(result).toHaveLength(1);
+ const content = result[0].content as any[];
+ expect(content[0].image_url.url).toBe('data:image/png;base64,localimage');
+ });
+
+ it('should preserve base64 images as-is', async () => {
+ const base64Image = 'data:image/png;base64,existingdata';
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.User,
+ content: [{ type: 'image_url', image_url: { url: base64Image } }]
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(result).toHaveLength(1);
+ const content = result[0].content as any[];
+ expect(content[0].image_url.url).toBe(base64Image);
+ expect(mockGetImageBase64).not.toHaveBeenCalled();
+ });
+
+ it('should handle invalid remote images gracefully', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.User,
+ content: [
+ { type: 'text', text: 'Text' },
+ { type: 'image_url', image_url: { url: 'https://invalid.com/image.png' } }
+ ]
+ }
+ ];
+
+ mockAxiosHead.mockRejectedValue(new Error('Network error'));
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(result).toHaveLength(1);
+ // When the image is filtered out and only text remains, the content becomes a string
+ expect(typeof result[0].content).toBe('string');
+ expect(result[0].content).toBe('Text');
+ });
+
+ it('should handle 405 status as valid image', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.User,
+ content: [
+ { type: 'text', text: 'Check this image:' },
+ { type: 'image_url', image_url: { url: 'https://example.com/image.png' } }
+ ]
+ }
+ ];
+
+ const error = new Error('Method not allowed');
+ (error as any).response = { status: 405 };
+ mockAxiosHead.mockRejectedValue(error);
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(result).toHaveLength(1);
+ // The function handles images in array content via a separate path; only the text part is expected to remain
+ expect(typeof result[0].content).toBe('string');
+ expect(result[0].content).toBe('Check this image:');
+ });
+
+ it('should remove origin from image URLs when provided', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.User,
+ content: [{ type: 'image_url', image_url: { url: 'https://mysite.com/images/test.png' } }]
+ }
+ ];
+
+ const result = await loadRequestMessages({
+ messages,
+ useVision: true,
+ origin: 'https://mysite.com'
+ });
+
+ // Just verify the function processes without error - axios call verification is complex
+ expect(result).toHaveLength(1);
+ });
+ });
+
+ describe('Assistant message processing', () => {
+ it('should process assistant message with string content', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Hi there!' }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(2);
+ expect(result[1].content).toBe('Hi there!');
+ });
+
+ it('should process assistant message with array content', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Assistant,
+ content: [
+ { type: 'text', text: 'Part 1' },
+ { type: 'text', text: 'Part 2' }
+ ]
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(2);
+ expect(result[1].content).toBe('Part 1\nPart 2');
+ });
+
+ it('should preserve tool_calls and function_call in assistant messages', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Assistant,
+ content: null,
+ tool_calls: [
+ {
+ id: 'call_123',
+ type: 'function',
+ function: { name: 'test_tool', arguments: '{}' }
+ }
+ ]
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(2);
+ expect((result[1] as any).tool_calls).toHaveLength(1);
+ expect((result[1] as any).tool_calls![0].function.name).toBe('test_tool');
+ });
+
+ it('should handle assistant message with null content', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: null }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(2);
+ expect(result[1].content).toBe('null');
+ });
+
+ it('should handle empty assistant content between other assistants', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'First' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: '' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Last' }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ // Adjacent assistant messages are merged; empty content in the middle is dropped during the merge
+ expect(result).toHaveLength(2);
+ expect(result[1].content).toBe('First\n\nLast');
+ });
+ });
+
+ describe('Message merging behavior', () => {
+ it('should merge consecutive system messages', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'System 2' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(2);
+ expect(result[0].role).toBe(ChatCompletionRequestMessageRoleEnum.System);
+ // When merged, system messages are concatenated into a single string
+ expect(typeof result[0].content).toBe('string');
+ expect(result[0].content).toBe('System 1\n\nSystem 2');
+ });
+
+ it('should merge consecutive user messages', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Message 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Message 2' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Response' }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(2);
+ expect(result[0].role).toBe(ChatCompletionRequestMessageRoleEnum.User);
+ // User messages get merged - final format may be array or string
+ expect(result[0].content).toBeDefined();
+ });
+
+ it('should merge consecutive assistant messages with content', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Part 1' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Part 2' }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(2);
+ expect(result[1].role).toBe(ChatCompletionRequestMessageRoleEnum.Assistant);
+ expect(result[1].content).toBe('Part 1\nPart 2');
+ });
+
+ it('should not merge assistant messages when one has tool calls', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Text response' },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Assistant,
+ content: null,
+ tool_calls: [
+ { id: 'call1', type: 'function', function: { name: 'tool', arguments: '{}' } }
+ ]
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(3); // Should not merge
+ expect(result[1].content).toBe('Text response');
+ expect((result[2] as any).tool_calls).toHaveLength(1);
+ });
+ });
+
+ describe('Other message types', () => {
+ it('should pass through tool messages unchanged', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Tool,
+ tool_call_id: 'call1',
+ content: 'Tool result'
+ }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(2);
+ expect(result[1].role).toBe(ChatCompletionRequestMessageRoleEnum.Tool);
+ expect(result[1].content).toBe('Tool result');
+ });
+
+ it('should handle user message with empty content as null', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: '' }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(1);
+ expect(result[0].content).toBe('null');
+ });
+
+ it('should handle undefined user content', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: undefined as any }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(1);
+ expect(result[0].content).toBe('null');
+ });
+ });
+
+ describe('Complex scenarios', () => {
+ it('should handle mixed conversation with all message types', async () => {
+ const messages: ChatCompletionMessageParam[] = [
+ { role: ChatCompletionRequestMessageRoleEnum.System, content: 'You are helpful' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'Hi!' },
+ { role: ChatCompletionRequestMessageRoleEnum.User, content: 'How are you?' },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Assistant,
+ content: null,
+ tool_calls: [
+ { id: 'call1', type: 'function', function: { name: 'check_status', arguments: '{}' } }
+ ]
+ },
+ {
+ role: ChatCompletionRequestMessageRoleEnum.Tool,
+ tool_call_id: 'call1',
+ content: 'Status: OK'
+ },
+ { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: 'I am doing well!' }
+ ];
+
+ const result = await loadRequestMessages({ messages });
+
+ expect(result).toHaveLength(7);
+ expect(result.map((msg) => msg.role)).toEqual([
+ ChatCompletionRequestMessageRoleEnum.System,
+ ChatCompletionRequestMessageRoleEnum.User,
+ ChatCompletionRequestMessageRoleEnum.Assistant,
+ ChatCompletionRequestMessageRoleEnum.User,
+ ChatCompletionRequestMessageRoleEnum.Assistant,
+ ChatCompletionRequestMessageRoleEnum.Tool,
+ ChatCompletionRequestMessageRoleEnum.Assistant
+ ]);
+ });
+
+ it('should handle environment variable MULTIPLE_DATA_TO_BASE64', async () => {
+ const originalEnv = process.env.MULTIPLE_DATA_TO_BASE64;
+ process.env.MULTIPLE_DATA_TO_BASE64 = 'true';
+
+ const messages: ChatCompletionMessageParam[] = [
+ {
+ role: ChatCompletionRequestMessageRoleEnum.User,
+ content: [{ type: 'image_url', image_url: { url: 'https://example.com/image.png' } }]
+ }
+ ];
+
+ mockGetImageBase64.mockResolvedValue({
+ completeBase64: 'data:image/png;base64,converted',
+ base64: 'converted',
+ mime: 'image/png'
+ });
+
+ const result = await loadRequestMessages({ messages, useVision: true });
+
+ expect(mockGetImageBase64).toHaveBeenCalledWith('https://example.com/image.png');
+ expect(result).toHaveLength(1);
+ const content = result[0].content as any[];
+ expect(content[0].image_url.url).toBe('data:image/png;base64,converted');
+
+ // Restore original environment
+ if (originalEnv !== undefined) {
+ process.env.MULTIPLE_DATA_TO_BASE64 = originalEnv;
+ } else {
+ process.env.MULTIPLE_DATA_TO_BASE64 = '';
+ }
+ });
+ });
+});
diff --git a/test/cases/service/core/app/workflow/workflowDispatch.test.ts b/test/cases/service/core/app/workflow/workflowDispatch.test.ts
index 6a6810acf034..ca4e54b45124 100644
--- a/test/cases/service/core/app/workflow/workflowDispatch.test.ts
+++ b/test/cases/service/core/app/workflow/workflowDispatch.test.ts
@@ -45,7 +45,11 @@ const testWorkflow = async (path: string) => {
},
runningUserInfo: {
tmbId: 'test',
- teamId: 'test'
+ teamId: 'test',
+ username: 'test',
+ teamName: 'test',
+ memberName: 'test',
+ contact: 'test'
},
timezone: 'Asia/Shanghai',
externalProvider: {},