提交 86f4f9e4 authored 作者: 王鹏飞's avatar 王鹏飞

chore: update

上级 4a4f3c5b
......@@ -17,14 +17,14 @@
"@fortaine/fetch-event-source": "^3.0.6",
"@tanstack/react-query": "^5.69.0",
"@tanstack/react-query-devtools": "^5.69.0",
"antd": "^5.24.5",
"antd": "^5.24.8",
"axios": "^1.8.4",
"blueimp-md5": "^2.19.0",
"echarts": "^5.6.0",
"echarts-for-react": "^3.0.2",
"echarts-wordcloud": "^2.1.0",
"lodash-es": "^4.17.21",
"lucide-react": "^0.484.0",
"lucide-react": "^0.503.0",
"normalize.css": "^8.0.1",
"react": "^18.3.1",
"react-dom": "^18.3.1",
......@@ -2116,9 +2116,9 @@
}
},
"node_modules/antd": {
"version": "5.24.5",
"resolved": "https://registry.npmjs.org/antd/-/antd-5.24.5.tgz",
"integrity": "sha512-1lAv/G+9ewQanyoAo3JumQmIlVxwo5QwWGb6QCHYc40Cq0NxC/EzITcjsgq1PSaTUpLkKq8A2l7Fjtu47vqQBg==",
"version": "5.24.8",
"resolved": "https://registry.npmjs.org/antd/-/antd-5.24.8.tgz",
"integrity": "sha512-vJcW81WSRq+ymBKTiA3NE+FddmiqTAKxdWVRZU+HnLLrRrIz896svcUxXFPa7M4mH9HqyeJ5JPOHsne4sQAC1A==",
"license": "MIT",
"dependencies": {
"@ant-design/colors": "^7.2.0",
......@@ -2144,12 +2144,12 @@
"rc-dropdown": "~4.2.1",
"rc-field-form": "~2.7.0",
"rc-image": "~7.11.1",
"rc-input": "~1.7.3",
"rc-input-number": "~9.4.0",
"rc-mentions": "~2.19.1",
"rc-input": "~1.8.0",
"rc-input-number": "~9.5.0",
"rc-mentions": "~2.20.0",
"rc-menu": "~9.16.1",
"rc-motion": "^2.9.5",
"rc-notification": "~5.6.3",
"rc-notification": "~5.6.4",
"rc-pagination": "~5.1.0",
"rc-picker": "~4.11.3",
"rc-progress": "~4.0.0",
......@@ -2161,8 +2161,8 @@
"rc-steps": "~6.0.1",
"rc-switch": "~4.1.0",
"rc-table": "~7.50.4",
"rc-tabs": "~15.5.1",
"rc-textarea": "~1.9.0",
"rc-tabs": "~15.6.0",
"rc-textarea": "~1.10.0",
"rc-tooltip": "~6.4.0",
"rc-tree": "~5.13.1",
"rc-tree-select": "~5.27.0",
......@@ -3856,9 +3856,9 @@
}
},
"node_modules/lucide-react": {
"version": "0.484.0",
"resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.484.0.tgz",
"integrity": "sha512-oZy8coK9kZzvqhSgfbGkPtTgyjpBvs3ukLgDPv14dSOZtBtboryWF5o8i3qen7QbGg7JhiJBz5mK1p8YoMZTLQ==",
"version": "0.503.0",
"resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.503.0.tgz",
"integrity": "sha512-HGGkdlPWQ0vTF8jJ5TdIqhQXZi6uh3LnNgfZ8MHiuxFfX3RZeA79r2MW2tHAZKlAVfoNE8esm3p+O6VkIvpj6w==",
"license": "ISC",
"peerDependencies": {
"react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0"
......@@ -5218,9 +5218,9 @@
}
},
"node_modules/rc-input": {
"version": "1.7.3",
"resolved": "https://registry.npmjs.org/rc-input/-/rc-input-1.7.3.tgz",
"integrity": "sha512-A5w4egJq8+4JzlQ55FfQjDnPvOaAbzwC3VLOAdOytyek3TboSOP9qxN+Gifup+shVXfvecBLBbWBpWxmk02SWQ==",
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/rc-input/-/rc-input-1.8.0.tgz",
"integrity": "sha512-KXvaTbX+7ha8a/k+eg6SYRVERK0NddX8QX7a7AnRvUa/rEH0CNMlpcBzBkhI0wp2C8C4HlMoYl8TImSN+fuHKA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.11.1",
......@@ -5233,15 +5233,15 @@
}
},
"node_modules/rc-input-number": {
"version": "9.4.0",
"resolved": "https://registry.npmjs.org/rc-input-number/-/rc-input-number-9.4.0.tgz",
"integrity": "sha512-Tiy4DcXcFXAf9wDhN8aUAyMeCLHJUHA/VA/t7Hj8ZEx5ETvxG7MArDOSE6psbiSCo+vJPm4E3fGN710ITVn6GA==",
"version": "9.5.0",
"resolved": "https://registry.npmjs.org/rc-input-number/-/rc-input-number-9.5.0.tgz",
"integrity": "sha512-bKaEvB5tHebUURAEXw35LDcnRZLq3x1k7GxfAqBMzmpHkDGzjAtnUL8y4y5N15rIFIg5IJgwr211jInl3cipag==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
"@rc-component/mini-decimal": "^1.0.1",
"classnames": "^2.2.5",
"rc-input": "~1.7.1",
"rc-input": "~1.8.0",
"rc-util": "^5.40.1"
},
"peerDependencies": {
......@@ -5250,17 +5250,17 @@
}
},
"node_modules/rc-mentions": {
"version": "2.19.1",
"resolved": "https://registry.npmjs.org/rc-mentions/-/rc-mentions-2.19.1.tgz",
"integrity": "sha512-KK3bAc/bPFI993J3necmaMXD2reZTzytZdlTvkeBbp50IGH1BDPDvxLdHDUrpQx2b2TGaVJsn+86BvYa03kGqA==",
"version": "2.20.0",
"resolved": "https://registry.npmjs.org/rc-mentions/-/rc-mentions-2.20.0.tgz",
"integrity": "sha512-w8HCMZEh3f0nR8ZEd466ATqmXFCMGMN5UFCzEUL0bM/nGw/wOS2GgRzKBcm19K++jDyuWCOJOdgcKGXU3fXfbQ==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.22.5",
"@rc-component/trigger": "^2.0.0",
"classnames": "^2.2.6",
"rc-input": "~1.7.1",
"rc-input": "~1.8.0",
"rc-menu": "~9.16.0",
"rc-textarea": "~1.9.0",
"rc-textarea": "~1.10.0",
"rc-util": "^5.34.1"
},
"peerDependencies": {
......@@ -5302,9 +5302,9 @@
}
},
"node_modules/rc-notification": {
"version": "5.6.3",
"resolved": "https://registry.npmjs.org/rc-notification/-/rc-notification-5.6.3.tgz",
"integrity": "sha512-42szwnn8VYQoT6GnjO00i1iwqV9D1TTMvxObWsuLwgl0TsOokzhkYiufdtQBsJMFjJravS1hfDKVMHLKLcPE4g==",
"version": "5.6.4",
"resolved": "https://registry.npmjs.org/rc-notification/-/rc-notification-5.6.4.tgz",
"integrity": "sha512-KcS4O6B4qzM3KH7lkwOB7ooLPZ4b6J+VMmQgT51VZCeEcmghdeR4IrMcFq0LG+RPdnbe/ArT086tGM8Snimgiw==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
......@@ -5550,9 +5550,9 @@
}
},
"node_modules/rc-tabs": {
"version": "15.5.1",
"resolved": "https://registry.npmjs.org/rc-tabs/-/rc-tabs-15.5.1.tgz",
"integrity": "sha512-yiWivLAjEo5d1v2xlseB2dQocsOhkoVSfo1krS8v8r+02K+TBUjSjXIf7dgyVSxp6wRIPv5pMi5hanNUlQMgUA==",
"version": "15.6.0",
"resolved": "https://registry.npmjs.org/rc-tabs/-/rc-tabs-15.6.0.tgz",
"integrity": "sha512-SQ99Yjc9ewrJCUwoWPKq0FeGL2znWsqPhfcZgsHz1R7bkA2rMNe7CPgOiJkwppdJ98wkLhzs9vPrv21QOE1RyQ==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.11.2",
......@@ -5572,14 +5572,14 @@
}
},
"node_modules/rc-textarea": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/rc-textarea/-/rc-textarea-1.9.0.tgz",
"integrity": "sha512-dQW/Bc/MriPBTugj2Kx9PMS5eXCCGn2cxoIaichjbNvOiARlaHdI99j4DTxLl/V8+PIfW06uFy7kjfUIDDKyxQ==",
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/rc-textarea/-/rc-textarea-1.10.0.tgz",
"integrity": "sha512-ai9IkanNuyBS4x6sOL8qu/Ld40e6cEs6pgk93R+XLYg0mDSjNBGey6/ZpDs5+gNLD7urQ14po3V6Ck2dJLt9SA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
"classnames": "^2.2.1",
"rc-input": "~1.7.1",
"rc-input": "~1.8.0",
"rc-resize-observer": "^1.0.0",
"rc-util": "^5.27.0"
},
......
......@@ -19,14 +19,14 @@
"@fortaine/fetch-event-source": "^3.0.6",
"@tanstack/react-query": "^5.69.0",
"@tanstack/react-query-devtools": "^5.69.0",
"antd": "^5.24.5",
"antd": "^5.24.8",
"axios": "^1.8.4",
"blueimp-md5": "^2.19.0",
"echarts": "^5.6.0",
"echarts-for-react": "^3.0.2",
"echarts-wordcloud": "^2.1.0",
"lodash-es": "^4.17.21",
"lucide-react": "^0.484.0",
"lucide-react": "^0.503.0",
"normalize.css": "^8.0.1",
"react": "^18.3.1",
"react-dom": "^18.3.1",
......
......@@ -3,5 +3,5 @@ import MarkdownRender from '@/components/MarkdownRender'
const renderMarkdown = (content: string) => <MarkdownRender>{content}</MarkdownRender>
export default function AIBubble(props: BubbleProps) {
return <Bubble messageRender={renderMarkdown} {...props}></Bubble>
return <Bubble messageRender={renderMarkdown} typing={{ step: 5, interval: 50 }} {...props}></Bubble>
}
import { Bubble } from '@ant-design/x'
import { GetProp } from 'antd'
import type { AIMessage } from './types'
import MarkdownRender from '@/components/MarkdownRender'
import { useMemo } from 'react'
// Shared renderer: displays bubble content as Markdown instead of plain text.
const renderMarkdown = (content: string) => <MarkdownRender>{content}</MarkdownRender>

// Per-role presets for Bubble.List: placement, typing effect and styling
// applied to every message with the matching `role`.
const roles: GetProp<typeof Bubble.List, 'roles'> = {
  assistant: {
    placement: 'start', // AI replies align left
    typing: { step: 5, interval: 50 }, // typewriter effect: 5 chars every 50ms
    messageRender: renderMarkdown,
    styles: { content: { maxWidth: '80%' } },
  },
  user: {
    placement: 'end', // user messages align right
    messageRender: renderMarkdown,
    styles: { content: { maxWidth: '80%' } },
  },
}
/**
 * Renders a list of chat messages as ant-design-x bubbles, using the
 * role presets declared above (assistant left / user right).
 */
export default function AIBubbleList({ messages, style }: { messages: AIMessage[]; style?: React.CSSProperties }) {
  // Memoize the derived bubble items so Bubble.List only recomputes when messages change.
  const items = useMemo(
    () =>
      messages.map((item: AIMessage) => ({
        key: item.id,
        role: item.role,
        // Prefer the assembled full_content (reasoning + answer) over the raw content.
        content: item.full_content || item.content,
        loading: item.status === 'loading',
      })),
    [messages]
  )
  return <Bubble.List roles={roles} items={items} style={style} />
}
......@@ -64,9 +64,9 @@
.message-scroll {
margin: 10px 0;
// height: 600px;
height: 100%;
overflow-x: hidden;
overflow-y: auto;
// height: 100%;
// overflow-x: hidden;
// overflow-y: auto;
}
.message-item {
display: flex;
......
import { useState, KeyboardEvent, useEffect, useRef } from 'react'
import { useState, KeyboardEvent } from 'react'
import { Button, Card, FloatButton, Input, Select } from 'antd'
import { CircleArrowLeft, CircleArrowRight } from 'lucide-react'
import { OpenAIOutlined, ArrowUpOutlined } from '@ant-design/icons'
import { useAIStore } from './useAIStore'
// import MarkdownRender from '@/components/MarkdownRender'
import AIBubble from './AIBubble'
import './AIChat.scss'
import AIBubbleList from './AIBubbleList'
// export const MessageItem = ({ message }: { message: AIMessage }) => {
// return (
// <div className={`message-item ${message.role}`}>
// <div className="message-box">
// <div className="message-content">
// <MarkdownRender>{message.content}</MarkdownRender>
// </div>
// </div>
// </div>
// )
// }
import './AIChat.scss'
export default function AIChat() {
const { ai, setAI, options, post, messages, isLoading, collapsed, toggleCollapsed } = useAIStore()
const ai = useAIStore((state) => state.ai)
const setAI = useAIStore((state) => state.setAI)
const options = useAIStore((state) => state.options)
const post = useAIStore((state) => state.post)
const messages = useAIStore((state) => state.messages)
const isLoading = useAIStore((state) => state.isLoading)
const collapsed = useAIStore((state) => state.collapsed)
const toggleCollapsed = useAIStore((state) => state.toggleCollapsed)
const [content, setContent] = useState('')
......@@ -35,14 +30,6 @@ export default function AIChat() {
post({ messages: [{ role: 'user', content }] })
}
const messageScrollRef = useRef<HTMLDivElement | null>(null)
useEffect(() => {
if (messageScrollRef.current) {
const scrollContainer = messageScrollRef.current
scrollContainer.scrollTop = scrollContainer.scrollHeight
}
}, [messages])
if (collapsed) {
return (
<Card
......@@ -50,20 +37,7 @@ export default function AIChat() {
title="AI对话"
extra={<span onClick={toggleCollapsed}>{collapsed ? <CircleArrowRight /> : <CircleArrowLeft />}</span>}>
<div className="ai-chat-container">
<div className="message-scroll" ref={messageScrollRef}>
{messages.map((message, index) => {
return (
<AIBubble
placement={message.role === 'assistant' ? 'start' : 'end'}
content={message.full_content || message.content}
loading={message.status === 'loading'}
typing={isLoading && index === messages.length - 1}
key={index}
style={{ margin: '10px 0' }}
/>
)
})}
</div>
<AIBubbleList messages={messages} style={{ height: '100%' }} />
<div className="input-container">
<div className="input-box">
<div className="edit-area">
......
import axios from 'axios'
import md5 from 'blueimp-md5'
import { AIData, AIMessage } from './types'
import { extractJSON } from '@/utils/helper'
import { sseRequest, SSEOptions } from '@/utils/sseRequest'
import { AIMessage, ChatRequestData, GenerateImageRequestData, GenerateVideoRequestData } from './types'
import { request, RequestOptions } from './request'
function transform(messages: any[]): AIMessage {
return messages.reduce(
(result, message) => {
let delta = null
if (message.choices && message.choices.length > 0) {
delta = message.choices[0].delta
}
const content = result.content + (delta.content || '')
const reasoning_content = result.reasoning_content + (delta.reasoning_content || '')
let full_content = ''
if (reasoning_content) {
full_content = `<div class="ai-thinking">${reasoning_content}`
if (content) {
full_content += `</div>${content}`
}
} else {
full_content = content
}
return {
id: message.id,
role: 'assistant',
content,
reasoning_content,
full_content,
json: extractJSON(content),
}
},
{ content: '', reasoning_content: '' }
)
/**
 * Maps one streamed chunk from an OpenAI-compatible chat API to a partial
 * assistant message. Returns null when the chunk carries no choices.
 */
function transform(message: any): AIMessage | null {
  const choices = message.choices
  if (!choices || choices.length === 0) {
    return null
  }
  const { reasoning_content, content } = choices[0].delta
  return { role: 'assistant', reasoning_content, content }
}
// 文心一言
export async function yiyan(data: AIData, options: SSEOptions) {
export async function chatBaidu(data: ChatRequestData, options: RequestOptions) {
const AK = 'wY7bvMpkWeZbDVq9w3EDvpjU'
const SK = 'XJwpiJWxs5HXkOtbo6tQrvYPZFJAWdAy'
const resp = await axios.post(
`/api/qianfan/oauth/2.0/token?grant_type=client_credentials&client_id=${AK}&client_secret=${SK}`
)
await sseRequest(
await request(
`/api/qianfan/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant?access_token=${resp.data.access_token}`,
{
...options,
......@@ -53,8 +33,8 @@ export async function yiyan(data: AIData, options: SSEOptions) {
}
// DeepSeek
export async function deepseek(data: AIData, options: SSEOptions) {
await sseRequest(
export async function chatDeepSeek(data: ChatRequestData, options: RequestOptions) {
await request(
'/api/deepseek/chat/completions',
{
...options,
......@@ -66,8 +46,11 @@ export async function deepseek(data: AIData, options: SSEOptions) {
}
// SiliconFlow
export async function siliconflow(data: AIData, options: SSEOptions) {
await sseRequest(
export async function chatSiliconFlow(data: ChatRequestData, options: RequestOptions) {
if (data.model === 'Qwen/QwQ-32B') {
delete data.response_format
}
await request(
'/api/siliconflow/v1/chat/completions',
{
...options,
......@@ -82,26 +65,26 @@ export async function siliconflow(data: AIData, options: SSEOptions) {
}
// 通义千问
export async function qwen(data: AIData, options: SSEOptions) {
await sseRequest(
export async function chatQwen(data: ChatRequestData, options: RequestOptions) {
await request(
'/api/qwen/compatible-mode/v1/chat/completions',
{
...options,
headers: { 'Content-Type': 'application/json', Authorization: 'Bearer sk-afd0fcdb53bf4058b2068b8548820150' },
body: JSON.stringify({ model: 'qwen-max-latest', stream: true, ...data }),
body: JSON.stringify({ ...data, model: 'qwen-max-latest', stream: true }),
},
transform
)
}
// 天工
export async function tiangong(data: AIData, options: SSEOptions) {
export async function chatTiangong(data: ChatRequestData, options: RequestOptions) {
const appKey = 'a8701b73637562d33a53c668a90ee3be'
const appSecret = 'e191593f486bb88a39c634f46926762dddc97b9082e192af'
const timestamp = Math.floor(Date.now() / 1000).toString()
const sign = md5(`${appKey}${appSecret}${timestamp}`)
await sseRequest('/api/tiangong/sky-saas-writing/api/v1/chat', {
await request('/api/tiangong/sky-saas-writing/api/v1/chat', {
...options,
headers: {
'Content-Type': 'application/json',
......@@ -115,8 +98,11 @@ export async function tiangong(data: AIData, options: SSEOptions) {
}
// OpenAI
export async function openAI(data: AIData, options: SSEOptions) {
await sseRequest(
export async function chatCreate(data: ChatRequestData, options: RequestOptions) {
if (data.model === 'Qwen/QwQ-32B') {
delete data.response_format
}
await request(
'/api/openai/chat/create',
{
...options,
......@@ -127,8 +113,34 @@ export async function openAI(data: AIData, options: SSEOptions) {
)
}
// Text-to-image generation.
/**
 * Generates an image from a text prompt via the proxied
 * /api/openai/image/create endpoint.
 * @param data prompt/model parameters for image generation
 * @param options request lifecycle callbacks (onUpdate/onSuccess/onError) plus fetch options
 * @returns the merged message result returned by `request`
 */
export async function imageCreate(data: GenerateImageRequestData, options: RequestOptions) {
  const resp = await request(
    '/api/openai/image/create',
    {
      ...options,
      // NOTE(review): hard-coded Authorization token checked into source — consider moving to env config.
      headers: { 'Content-Type': 'application/json', Authorization: 'ezijing@20250331' },
      // Force the image model; spreading data first means callers cannot override it.
      body: JSON.stringify({ ...data, model: 'image-01' }),
    },
    // Transform: unwrap the API payload and attach a success notice for the chat UI.
    // ('图片生成成功' = "image generated successfully" — runtime string, kept as-is)
    (response) => {
      return { ...response.data, content: '图片生成成功' }
    }
  )
  return resp
}
// Text-to-video generation.
/**
 * Generates a video from a text prompt via the proxied
 * /api/openai/video/create endpoint.
 * @param data prompt/model parameters for video generation
 * @param options request lifecycle callbacks plus fetch options
 * @returns the merged message result returned by `request`
 */
export async function videoCreate(data: GenerateVideoRequestData, options: RequestOptions) {
  const resp = await request('/api/openai/video/create', {
    ...options,
    // NOTE(review): hard-coded Authorization token checked into source — consider moving to env config.
    headers: { 'Content-Type': 'application/json', Authorization: 'ezijing@20250331' },
    body: JSON.stringify(data),
  })
  return resp
}
const aiService = {
async post(data: AIData, options: SSEOptions) {
async post(data: any, options: RequestOptions, method?: string) {
const messages: AIMessage[] = []
const dataset = localStorage.getItem('dataset')
if (dataset) {
......@@ -136,17 +148,18 @@ const aiService = {
messages.push({ role: 'system', content: `这是一个数据集:${JSON.stringify(datasetInfo)}` })
}
data.messages = [...messages, ...data.messages]
const providers = {
yiyan,
deepseek,
siliconflow,
qwen,
tiangong,
openAI,
chatBaidu,
chatDeepSeek,
chatSiliconFlow,
chatQwen,
chatTiangong,
chatCreate,
imageCreate,
videoCreate,
}
const provider = providers[data.model as keyof typeof providers] || openAI
const provider = providers[method as keyof typeof providers] || chatCreate
await provider(data, options)
},
}
......
......@@ -8,6 +8,6 @@ export const AI_OPTIONS: AIOption[] = [
{ label: 'doubao-pro-32k-241215', value: 'doubao-pro-32k-241215' },
{ label: 'qwen-max-latest', value: 'qwen-max-latest' },
{ label: 'qwen-long', value: 'qwen-long' },
{ label: 'hunyuan-t1-latest', value: 'hunyuan-t1-latest' },
{ label: 'hunyuan-standard-256K', value: 'hunyuan-standard-256K' },
// { label: 'hunyuan-t1-latest', value: 'hunyuan-t1-latest' },
// { label: 'hunyuan-standard-256K', value: 'hunyuan-standard-256K' },
]
import { XStream } from '@ant-design/x'
import { message as antdMessage } from 'antd'
import { uniqueId, throttle } from 'lodash-es'
import { extractJSON } from '@/utils/helper'
import { AIMessage } from './types'
/**
 * Options accepted by `request`: standard fetch RequestInit plus
 * streaming lifecycle callbacks.
 */
export interface RequestOptions extends RequestInit {
  /** Called (throttled) with the merged message after each streamed chunk. */
  onUpdate?: (data: any) => void
  /** Called once with the final merged message when the request completes. */
  onSuccess?: (data: any) => void
  /** Called with the error when the request fails (also receives the merged error message via onError in `request`). */
  onError?: (error: any) => void
}
/** Applies the optional transform to data; passes data through unchanged when none is given. */
function transformFn<T>(data: T, transform?: (data: T) => any): any {
  if (transform) {
    return transform(data)
  }
  return data
}
/**
 * Folds a sequence of streamed message chunks into a single message.
 * Default merge: concatenates content / reasoning_content across chunks,
 * wraps any reasoning text in an "ai-thinking" div, and extracts embedded JSON.
 * A caller-supplied `merge` callback replaces the default entirely.
 */
function mergeFn(messages: AIMessage[], merge?: (messages: AIMessage[]) => AIMessage) {
  if (merge) {
    return merge(messages)
  }
  let acc: any = { content: '', reasoning_content: '' }
  for (const message of messages) {
    const content = acc.content + (message.content || '')
    const reasoning_content = acc.reasoning_content + (message.reasoning_content || '')
    // Streaming-friendly markup: the closing </div> is only emitted once real
    // answer content follows the reasoning text.
    let full_content = content
    if (reasoning_content) {
      full_content = `<div class="ai-thinking">${reasoning_content}`
      if (content) {
        full_content += `</div>${content}`
      }
    }
    acc = {
      ...acc,
      ...message,
      content,
      reasoning_content,
      full_content,
      json: extractJSON(content),
    }
  }
  return acc
}
/**
 * POSTs to `url` and normalizes both streaming (SSE) and plain-JSON responses
 * into the app's message shape.
 *
 * - Streamed chunks are parsed, passed through `transform`, accumulated, and
 *   surfaced via a throttled `onUpdate` with the merged message so far.
 * - Non-streaming responses are transformed once and reported via `onSuccess`.
 * - Errors (except AbortError) pop an antd toast and are reported via `onError`.
 *
 * @param url       endpoint to POST to
 * @param options   RequestInit plus onUpdate/onSuccess/onError callbacks
 * @param transform maps one raw chunk / response body to a partial message
 * @param merge     optional custom reducer for combining accumulated chunks
 * @returns the final merged message (also delivered through the callbacks)
 */
export async function request(
  url: string,
  options: RequestOptions = {},
  transform?: (data: any) => any,
  merge?: (messages: AIMessage[]) => AIMessage
) {
  const { onUpdate, onSuccess, onError, body, ...fetchOptions } = options
  const id = uniqueId('chat_')
  // Every accumulated chunk shares this id so the UI can upsert a single bubble.
  const baseMessage: AIMessage = { id, role: 'assistant', content: '', status: 'loading' }
  const messages: AIMessage[] = []
  // Create a throttled version of onUpdate — caps UI updates while streaming.
  const throttledOnUpdate = onUpdate ? throttle(onUpdate, 400, { leading: true, trailing: true }) : undefined
  try {
    messages.push({ ...baseMessage, status: 'loading' })
    const response = await fetch(url, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body,
      ...fetchOptions,
    })
    if (!response.ok) {
      throw new Error(`${response.status}:${response.statusText}`)
    }
    // Heuristic: treat the response as SSE when the request body opted into
    // streaming. NOTE(review): any body containing the substring "stream"
    // matches, not just `"stream": true` — consider checking the parsed flag.
    if (response.body && typeof body === 'string' && body.includes('stream')) {
      const stream = XStream({ readableStream: response.body })
      for await (const chunk of stream) {
        const { data } = chunk
        // Skip empty frames and the terminal [DONE] sentinel.
        if (data && !data.includes('[DONE]')) {
          messages.push({ ...baseMessage, ...transformFn(JSON.parse(data), transform), status: 'typing' })
          throttledOnUpdate?.(mergeFn(messages, merge))
        }
      }
      messages.push({ ...baseMessage, status: 'success' })
    } else {
      const json = await response.json()
      messages.push({ ...baseMessage, ...transformFn(json, transform), status: 'success' })
    }
    onSuccess?.(mergeFn(messages, merge))
  } catch (error: any) {
    console.error(error)
    // '请求失败' = "request failed" — default user-facing message (runtime string).
    const errorMsg = error?.message || '请求失败'
    // Deliberately silent on user-initiated aborts; toast everything else.
    if (error.name !== 'AbortError') {
      antdMessage.error(errorMsg)
    }
    messages.push({ ...baseMessage, content: errorMsg, status: 'error' })
    onError?.(mergeFn(messages, merge))
  }
  return mergeFn(messages, merge)
}
......@@ -11,10 +11,28 @@ export interface AIMessage {
full_content?: string
json?: any
status?: 'loading' | 'success' | 'error' | 'typing'
image_urls?: string[]
error?: string
}
export interface AIData {
/** Payload for the chat-completion providers (chatBaidu / chatDeepSeek / chatQwen / …). */
export interface ChatRequestData {
  /** Ask the model for plain text or a JSON object response. */
  response_format?: { type: 'text' | 'json_object' }
  /** Model identifier; providers fall back to their own default when omitted. */
  model?: string
  /** Conversation history sent to the model. */
  messages: AIMessage[]
}

/** Payload for the text-to-image endpoint (`imageCreate`). */
export interface GenerateImageRequestData {
  model: string
  /** Text prompt describing the desired image. */
  prompt: string
  /** Number of images to generate. */
  n: number
  /** e.g. "16:9" — presumably provider-specific; confirm against the image API. */
  aspect_ratio: string
  /** Optional reference images to condition generation on. */
  subject_reference?: { type: string; image_file: string }[]
}

/** Payload for the text-to-video endpoint (`videoCreate`). */
export interface GenerateVideoRequestData {
  model: string
  /** Text prompt describing the desired video. */
  prompt: string
  /** Number of videos to generate. */
  n: number
}

/** Union of every request payload `aiService.post` accepts. */
export type AIRequestData = ChatRequestData | GenerateImageRequestData | GenerateVideoRequestData
import { useState, useEffect, useCallback, useRef } from 'react'
import { AI_OPTIONS } from './config'
import aiService from './api'
import type { AIMessage, AIData } from './types'
import { SSEOptions } from '@/utils/sseRequest'
import type { AIMessage, AIRequestData, GenerateImageRequestData, GenerateVideoRequestData } from './types'
import { RequestOptions } from './request'
export function useAI(globalOptions?: SSEOptions) {
export function useAI(globalOptions?: RequestOptions) {
const [ai, setAI] = useState<string>(localStorage.getItem('ai') || 'Pro/deepseek-ai/DeepSeek-R1')
const [messages, setMessages] = useState<AIMessage[]>([])
const [message, setMessage] = useState<AIMessage | null>(null)
const [isLoading, setIsLoading] = useState<boolean>(false)
const [error, setError] = useState<string | null>(null)
const controllerRef = useRef<AbortController | null>(null)
useEffect(() => {
localStorage.setItem('ai', ai)
}, [ai])
......@@ -28,8 +27,16 @@ export function useAI(globalOptions?: SSEOptions) {
}
}, [])
const callback = (message: AIMessage) => {
setMessage(message)
setMessages((prev) => {
const messageIndex = prev.findIndex((item) => item.id === message.id)
return messageIndex === -1 ? [...prev, message] : prev.map((item) => (item.id === message.id ? message : item))
})
}
const post = useCallback(
async (data: AIData) => {
async (data: AIRequestData, method?: string) => {
// 如果已经有正在进行的请求,先取消它
abort()
......@@ -39,25 +46,21 @@ export function useAI(globalOptions?: SSEOptions) {
// 创建新的 controller
controllerRef.current = new AbortController()
// 添加用户消息
const userMessages = data.messages.filter((item) => item.role !== 'system')
setMessages((prev) => [...prev, ...userMessages])
if ('messages' in data) {
const userMessages = data.messages.filter((item) => item.role !== 'system')
setMessages((prev) => [...prev, ...userMessages])
}
aiService.post(
{ model: ai, ...data },
{
signal: controllerRef.current.signal,
onUpdate: (message) => {
setMessage(message)
setMessages((prev) => {
const messageIndex = prev.findIndex((item) => item.id === message.id)
return messageIndex === -1
? [...prev, message]
: prev.map((item) => (item.id === message.id ? message : item))
})
callback(message)
},
onSuccess: (message) => {
callback(message)
controllerRef.current = null
setIsLoading(false)
resolve(message)
......@@ -69,13 +72,24 @@ export function useAI(globalOptions?: SSEOptions) {
reject(err)
},
...globalOptions,
}
},
method
)
})
},
[ai, globalOptions, abort]
)
const clear = useCallback(() => {
setMessages([])
}, [])
// 文生图
const generateImage = useCallback(async (data: GenerateImageRequestData) => post(data, 'imageCreate'), [post])
// 文生视频
const generateVideo = useCallback(async (data: GenerateVideoRequestData) => post(data, 'videoCreate'), [post])
// 组件卸载时取消请求
useEffect(() => {
return () => {
......@@ -94,5 +108,8 @@ export function useAI(globalOptions?: SSEOptions) {
error,
clearError,
abort,
clear,
generateImage,
generateVideo,
}
}
import { create } from 'zustand'
import { AI_OPTIONS } from './config'
import aiService from './api'
import type { AIOption, AIMessage, AIData } from './types'
import type { AIOption, AIMessage, AIRequestData, ChatRequestData } from './types'
interface AIState {
ai: string
......@@ -14,7 +14,7 @@ interface AIState {
controller: AbortController | null
setAI: (ai: string) => void
toggleCollapsed: () => void
post: (data: AIData) => Promise<AIMessage>
post: (data: AIRequestData) => Promise<AIMessage>
clearError: () => void
}
......@@ -45,32 +45,37 @@ export const useAIStore = create<AIState>((set, get) => ({
collapsed: true,
isLoading: true,
error: null,
messages: [...messages, ...data.messages.filter((item) => item.role !== 'system')],
messages: [
...messages,
...(data as ChatRequestData).messages.filter((item: AIMessage) => item.role !== 'system'),
],
})
const callback = (message: AIMessage) => {
set((state) => {
const messageIndex = state.messages.findIndex((item) => item.id === message.id)
const messages =
messageIndex === -1
? [...state.messages, message]
: state.messages.map((msg) => (msg.id === message.id ? message : msg))
return { message, messages }
})
}
return new Promise<AIMessage>((resolve, reject) => {
aiService.post(
{ model: ai, ...data },
{
signal: controller.signal,
onUpdate: (message) => {
set((state) => {
const messageIndex = state.messages.findIndex((item) => item.id === message.id)
const messages =
messageIndex === -1
? [...state.messages, message]
: state.messages.map((msg) => (msg.id === message.id ? message : msg))
return { message, messages }
})
callback(message)
},
onSuccess: (message) => {
callback(message)
resolve(message)
set({ isLoading: false })
set({ isLoading: false, controller: null })
},
onError: (err) => {
reject(err)
set({ isLoading: false })
set({ error: err.message })
set({ isLoading: false, error: err.message, controller: null })
},
}
)
......
......@@ -5,7 +5,7 @@ import AIBubble from '@/ai/AIBubble'
export default function AIModal({ prompt }: { prompt: string }) {
const [open, setOpen] = useState(false)
const { post, isLoading, message } = useAI()
const { post, message } = useAI()
useEffect(() => {
if (open) {
......@@ -18,7 +18,7 @@ export default function AIModal({ prompt }: { prompt: string }) {
AI建议
</Button>
<Modal title="AI建议" open={open} footer={null} width={1000} onCancel={() => setOpen(false)} destroyOnClose>
<AIBubble loading={!message?.content} typing={isLoading} content={message?.content}></AIBubble>
<AIBubble loading={!message?.full_content} content={message?.full_content}></AIBubble>
</Modal>
</>
)
......
......@@ -22,7 +22,8 @@ export default function ButtonModal() {
}))
const selectOptions = resultsOptions.length ? resultsOptions : fieldOptions
const { isLoading, post } = useAIStore()
const isLoading = useAIStore((state) => state.isLoading)
const post = useAIStore((state) => state.post)
const handleAI = async () => {
const message = await post({
......
......@@ -22,7 +22,8 @@ export default function ButtonModal() {
}))
const selectOptions = resultsOptions.length ? resultsOptions : fieldOptions
const { isLoading, post } = useAIStore()
const isLoading = useAIStore((state) => state.isLoading)
const post = useAIStore((state) => state.post)
const handleAI = async () => {
const message = await post({
......
......@@ -24,7 +24,8 @@ export default function ButtonModal() {
const selectOptions = resultsOptions.length ? resultsOptions : fieldOptions
const { isLoading, post } = useAIStore()
const isLoading = useAIStore((state) => state.isLoading)
const post = useAIStore((state) => state.post)
const handleAI = async () => {
const message = await post({
......
......@@ -38,7 +38,8 @@ export default function ButtonModal() {
}))
const selectOptions = resultsOptions.length ? resultsOptions : fieldOptions
const { isLoading, post } = useAIStore()
const isLoading = useAIStore((state) => state.isLoading)
const post = useAIStore((state) => state.post)
const handleAI = async () => {
const message = await post({
......
......@@ -23,7 +23,8 @@ export default function ButtonModal() {
}
}, [searchParams])
const { isLoading, post } = useAIStore()
const isLoading = useAIStore((state) => state.isLoading)
const post = useAIStore((state) => state.post)
const handleAI = async () => {
const message = await post({
......
......@@ -21,7 +21,8 @@ export default function ButtonModal() {
value: fieldOptions.find((option) => option.label === result.name)?.value || '',
}))
const { isLoading, post } = useAIStore()
const isLoading = useAIStore((state) => state.isLoading)
const post = useAIStore((state) => state.post)
const handleAI = async () => {
const message = await post({
......
......@@ -21,7 +21,8 @@ export default function ButtonModal() {
value: fieldOptions.find((option) => option.label === result.name)?.value || '',
}))
const { isLoading, post } = useAIStore()
const isLoading = useAIStore((state) => state.isLoading)
const post = useAIStore((state) => state.post)
const handleAI = async () => {
const message = await post({
......
......@@ -21,7 +21,8 @@ export default function ButtonModal() {
value: fieldOptions.find((option) => option.label === result.name)?.value || '',
}))
const { isLoading, post } = useAIStore()
const isLoading = useAIStore((state) => state.isLoading)
const post = useAIStore((state) => state.post)
const handleAI = async () => {
const message = await post({
......
......@@ -23,7 +23,8 @@ export default function ButtonModal() {
value: fieldOptions.find((option) => option.label === result.name)?.value || '',
}))
const { isLoading, post } = useAIStore()
const isLoading = useAIStore((state) => state.isLoading)
const post = useAIStore((state) => state.post)
const handleAI = async () => {
const message = await post({
......
......@@ -15,7 +15,8 @@ interface ResultItem {
export default function ButtonModal() {
const [results, setResults] = useState<ResultItem[]>([])
const { isLoading, post } = useAIStore()
const isLoading = useAIStore((state) => state.isLoading)
const post = useAIStore((state) => state.post)
const handleAI = async () => {
const message = await post({
......
......@@ -5,7 +5,7 @@ import AIBubble from '@/ai/AIBubble'
export default function DataReport() {
const [open, setOpen] = useState(false)
const { post, isLoading, message } = useAI()
const { post, message } = useAI()
useEffect(() => {
if (open) {
......@@ -26,7 +26,7 @@ export default function DataReport() {
数据分析报告
</Button>
<Modal title="数据分析报告" open={open} footer={null} width={1000} onCancel={() => setOpen(false)} destroyOnClose>
<AIBubble loading={!message?.full_content} typing={isLoading} content={message?.full_content}></AIBubble>
<AIBubble loading={!message?.full_content} content={message?.full_content}></AIBubble>
</Modal>
</>
)
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论