Commit 9501c3f
Co-authored-by: heheer <[email protected]>
1 parent 5bca15f

Showing 170 changed files with 5,785 additions and 2,341 deletions.
@@ -0,0 +1,19 @@
---
title: 'V4.7 (in progress)'
description: 'FastGPT V4.7 release notes'
icon: 'upgrade'
draft: false
toc: true
weight: 826
---

## Configuration file changes

Added some Boolean flags that determine which models each feature module may use: [view the latest configuration file](/docs/development/configuration/)

## V4.7 release notes

1. New - Tool-call module, which lets the LLM dynamically choose other models or plugins to run based on user intent.
2. New - Classification and content extraction now support functionCall mode, so models that support functionCall but not toolCall can be used as well. Set `functionCall` to `true` and `toolChoice` to `false` in the LLM model config file; if `toolChoice` is `true`, tool mode is used instead (see the config sketch after this diff).
3. Optimization - Advanced orchestration performance.
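The Boolean flags referenced above line up with the `usedIn*` keys in `llmModelTypeFilterMap` introduced later in this commit, and `functionCall`/`toolChoice` pick the calling mode for classification and extraction. A minimal sketch of one LLM model entry with these flags, written as a TypeScript literal; the `LLMModelFlags` alias, the `model`/`name` fields, and the example values are illustrative assumptions, not the exact config schema:

```ts
// Sketch of one LLM model config entry with the new Boolean flags.
// Assumed shape; see /docs/development/configuration/ for the real config file.
type LLMModelFlags = {
  model: string; // provider model id (illustrative field name)
  name: string; // display name (illustrative field name)
  // Which feature modules may use this model (flags added in this release):
  usedInClassify: boolean;
  usedInExtractFields: boolean;
  usedInToolCall: boolean;
  usedInQueryExtension: boolean;
  // Calling mode: toolChoice: true uses tool mode;
  // otherwise functionCall: true falls back to function-call mode.
  toolChoice: boolean;
  functionCall: boolean;
};

const exampleModel: LLMModelFlags = {
  model: 'gpt-3.5-turbo',
  name: 'gpt-3.5-turbo',
  usedInClassify: true,
  usedInExtractFields: true,
  usedInToolCall: true,
  usedInQueryExtension: true,
  toolChoice: false, // this model supports functionCall but not toolCall...
  functionCall: true // ...so function-call mode is used
};
```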
@@ -0,0 +1,27 @@
export enum ChatCompletionRequestMessageRoleEnum {
  'System' = 'system',
  'User' = 'user',
  'Assistant' = 'assistant',
  'Function' = 'function',
  'Tool' = 'tool'
}

export enum ChatMessageTypeEnum {
  text = 'text',
  image_url = 'image_url'
}

export enum LLMModelTypeEnum {
  all = 'all',
  classify = 'classify',
  extractFields = 'extractFields',
  toolCall = 'toolCall',
  queryExtension = 'queryExtension'
}
export const llmModelTypeFilterMap = {
  [LLMModelTypeEnum.all]: 'model',
  [LLMModelTypeEnum.classify]: 'usedInClassify',
  [LLMModelTypeEnum.extractFields]: 'usedInExtractFields',
  [LLMModelTypeEnum.toolCall]: 'usedInToolCall',
  [LLMModelTypeEnum.queryExtension]: 'usedInQueryExtension'
};
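A short usage sketch of how `llmModelTypeFilterMap` can gate model selection per feature module; the `ModelEntry` shape, the `filterModelsByType` helper, and the import path are hypothetical illustrations, not the repository's actual code:

```ts
import { LLMModelTypeEnum, llmModelTypeFilterMap } from './constants'; // path illustrative

// Hypothetical minimal model shape for this sketch; the real FastGPT model
// config carries many more fields.
type ModelEntry = {
  model: string;
  usedInClassify?: boolean;
  usedInExtractFields?: boolean;
  usedInToolCall?: boolean;
  usedInQueryExtension?: boolean;
};

// Return the models a given feature module is allowed to use:
// 'all' maps to the 'model' key (any entry with a model id passes),
// every other type maps to its usedIn* Boolean flag.
function filterModelsByType(models: ModelEntry[], type: LLMModelTypeEnum): ModelEntry[] {
  const filterKey = llmModelTypeFilterMap[type];
  return models.filter((m) => Boolean((m as Record<string, unknown>)[filterKey]));
}

// Usage: filterModelsByType(llmModels, LLMModelTypeEnum.classify)
// keeps only entries with usedInClassify === true.
```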
@@ -1,24 +1,40 @@
import openai from 'openai';
import type {
  ChatCompletion,
  ChatCompletionCreateParams,
  ChatCompletionMessageToolCall,
  ChatCompletionChunk,
  ChatCompletionMessageParam,
  ChatCompletionContentPart
  ChatCompletionToolMessageParam,
  ChatCompletionAssistantMessageParam
} from 'openai/resources';
import { ChatMessageTypeEnum } from './constants';

export type ChatCompletionContentPart = ChatCompletionContentPart;
export type ChatCompletionCreateParams = ChatCompletionCreateParams;
export type ChatMessageItemType = Omit<ChatCompletionMessageParam, 'name'> & {
  name?: any;
export * from 'openai/resources';

export type ChatCompletionMessageParam = ChatCompletionMessageParam & {
  dataId?: string;
  content: any;
} & any;
};
export type ChatCompletionToolMessageParam = ChatCompletionToolMessageParam & { name: string };
export type ChatCompletionAssistantToolParam = {
  role: 'assistant';
  tool_calls: ChatCompletionMessageToolCall[];
};

export type ChatCompletion = ChatCompletion;
export type ChatCompletionMessageToolCall = ChatCompletionMessageToolCall & {
  toolName?: string;
  toolAvatar?: string;
};
export type ChatCompletionMessageFunctionCall = ChatCompletionAssistantMessageParam.FunctionCall & {
  id?: string;
  toolName?: string;
  toolAvatar?: string;
};
export type StreamChatType = Stream<ChatCompletionChunk>;

export type PromptTemplateItem = {
  title: string;
  desc: string;
  value: string;
};

export default openai;
export * from 'openai';
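The widened types in this diff attach display metadata (`toolName`, `toolAvatar`) and a required `name` to OpenAI's tool-calling shapes. A hedged sketch of how a tool-call round trip might be typed with them; the import path, tool name, and payload values are illustrative and not taken from the repository:

```ts
import type {
  ChatCompletionAssistantToolParam,
  ChatCompletionMessageToolCall,
  ChatCompletionToolMessageParam
} from './type.d'; // path illustrative

// A hypothetical tool call returned by the LLM (id/arguments are made-up values).
const toolCall: ChatCompletionMessageToolCall = {
  id: 'call_0001',
  type: 'function',
  function: { name: 'searchDataset', arguments: '{"query":"FastGPT"}' },
  // Extra fields added by this commit, presumably for display in the chat UI:
  toolName: 'Dataset search',
  toolAvatar: '/imgs/tool.svg'
};

// Assistant turn carrying the tool calls...
const assistantTurn: ChatCompletionAssistantToolParam = {
  role: 'assistant',
  tool_calls: [toolCall]
};

// ...followed by the tool's response, which echoes the tool_call_id and,
// per the widened ChatCompletionToolMessageParam, also carries a name.
const toolTurn: ChatCompletionToolMessageParam = {
  role: 'tool',
  tool_call_id: toolCall.id,
  name: toolCall.function.name,
  content: '{"results":[]}'
};
```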