提交 350da781,作者:王鹏飞

chore: update

上级 068ee8d5
import Markdown from 'react-markdown'
import remarkGfm from 'remark-gfm'
/**
 * Shared markdown renderer: react-markdown with GitHub-flavored
 * markdown (tables, strikethrough, task lists, autolinks) enabled
 * via the remark-gfm plugin.
 */
export default function MarkdownRender({ children }: { children?: string }) {
  return <Markdown remarkPlugins={[remarkGfm]}>{children}</Markdown>
}
......@@ -2,9 +2,8 @@ import { useState, KeyboardEvent, useEffect, useRef } from 'react'
import { Button, Card, FloatButton, Input, Select } from 'antd'
import { CircleArrowLeft, CircleArrowRight } from 'lucide-react'
import { OpenAIOutlined, ArrowUpOutlined } from '@ant-design/icons'
import Markdown from 'react-markdown'
import remarkGfm from 'remark-gfm'
import { useAIStore, AIMessage } from '@/stores/ai'
import MarkdownRender from '../MarkdownRender'
import './AIChat.scss'
export const MessageItem = ({ message }: { message: AIMessage }) => {
......@@ -12,7 +11,7 @@ export const MessageItem = ({ message }: { message: AIMessage }) => {
<div className={`message-item ${message.role}`}>
<div className="message-box">
<div className="message-content">
<Markdown remarkPlugins={[remarkGfm]}>{message.content}</Markdown>
<MarkdownRender>{message.content}</MarkdownRender>
</div>
</div>
</div>
......
import { useState, useEffect, useCallback } from 'react'
import aiService, { AIMessage, AIData, AI_OPTIONS } from '@/utils/ai'
import aiService, { AIMessage, AIData, AI_OPTIONS, InitOptions } from '@/utils/ai'
export function useAI() {
export function useAI(globalOptions?: InitOptions) {
const [ai, setAI] = useState<string>(localStorage.getItem('ai') || 'qwen')
const [messages, setMessages] = useState<AIMessage[]>([])
const [message, setMessage] = useState<AIMessage | null>(null) // 存储最新的消息
const [isLoading, setIsLoading] = useState<boolean>(false)
useEffect(() => {
localStorage.setItem('ai', ai)
}, [ai])
const post = useCallback(
async (data: AIData) => {
async (data: AIData, options?: InitOptions) => {
setIsLoading(true)
setMessages((prevMessages) => [...prevMessages, ...data.messages.filter((item) => item.role !== 'system')])
try {
// 先添加用户消息(去掉 system 角色的消息)
const userMessages = data.messages.filter((item) => item.role !== 'system')
setMessages((prevMessages) => [...prevMessages, ...userMessages])
await aiService.post(ai, data, {
onUpdate: (response) => {
onMessage: (response) => {
setMessages((prevMessages) => {
const messageIndex = prevMessages.findIndex((msg) => msg.id === response.id)
if (messageIndex === -1) {
return [...prevMessages, { id: response.id, role: 'assistant', content: response.content }]
const newMessage: AIMessage = { id: response.id, role: 'assistant', content: response.content }
setMessage(newMessage) // 更新最新消息
return [...prevMessages, newMessage]
} else {
return prevMessages.map((msg) =>
msg.id === response.id ? { ...msg, content: msg.content + response.content } : msg
)
return prevMessages.map((msg) => {
if (msg.id === response.id) {
const updatedMessage = { ...msg, content: msg.content + response.content }
setMessage(updatedMessage) // 更新最新消息
return updatedMessage
}
return msg
})
}
})
},
onError: (err) => {
console.error('AI 请求失败:', err)
onerror: () => {
setIsLoading(false)
},
onComplete: () => {
onclose: () => {
setIsLoading(false)
},
...globalOptions,
...options,
})
} catch (err) {
console.error('AI 请求失败:', err)
setIsLoading(false)
}
},
[ai]
)
return { ai, setAI, options: AI_OPTIONS, post, messages, isLoading }
return { ai, setAI, options: AI_OPTIONS, post, messages, message, isLoading }
}
import { Button, Flex, Modal, Spin } from 'antd'
import { useEffect, useState } from 'react'
import { useAI } from '@/hooks/useAI'
import MarkdownRender from '@/components/MarkdownRender'
/**
 * "数据分析报告" button + modal. Opening the modal fires an AI request for a
 * structured data-quality report and renders the streamed reply as markdown,
 * with a spinner while the stream is in flight.
 */
export default function DataReport() {
  const [open, setOpen] = useState(false)
  const { post, isLoading, message } = useAI()

  // Kick off a fresh report request every time the dialog is opened.
  useEffect(() => {
    if (!open) return
    post({
      messages: [
        {
          role: 'user',
          content:
            '作为数据分析师,请基于提供的数据集,生成一份结构化的数据质量分析报告。需包含字段解释、关系梳理、质量评估及改进建议。',
        },
      ],
    })
  }, [open])

  return (
    <>
      <Button type="primary" onClick={() => setOpen(true)}>
        数据分析报告
      </Button>
      <Modal title="数据分析报告" open={open} footer={null} width={1000} onCancel={() => setOpen(false)} destroyOnClose>
        <MarkdownRender>{message?.content}</MarkdownRender>
        <Flex justify="center">
          <Spin size="large" spinning={isLoading} />
        </Flex>
      </Modal>
    </>
  )
}
import { Link } from 'react-router'
import { Button, Empty, Flex, Space } from 'antd'
import DataWrap from '@/components/data/DataWrap'
import DataReport from '../components/DataReport'
// 无数据渲染
const EmptyRender = () => {
......@@ -37,7 +38,7 @@ export default function DataWriteMy() {
headerRender={(data) => (
<Flex justify="space-between" align="middle" style={{ marginBottom: '20px' }}>
<h4>数据集名称:{data.info.name}</h4>
<Button type="primary">数据质量分析报告</Button>
<DataReport />
</Flex>
)}
empty={<EmptyRender />}></DataWrap>
......
import { create } from 'zustand'
import aiService, { AIOption, AIMessage, AIData, AI_OPTIONS } from '@/utils/ai'
import aiService, { AIOption, AIMessage, AIData, AI_OPTIONS, InitOptions } from '@/utils/ai'
/** Global AI chat state managed by the zustand store. */
interface AIState {
  /** Current provider key (e.g. 'qwen'). */
  ai: string
  /** Available provider options for the selector. */
  options: AIOption[]
  /** Latest assistant message (updated per streamed chunk). */
  message: AIMessage | null
  /** Full chat transcript. */
  messages: AIMessage[]
  /** True while a stream is in flight. */
  isLoading: boolean
  /** Whether the chat panel is collapsed. */
  collapsed: boolean
  setAI: (ai: string) => void
  toggleCollapsed: () => void
  /** Send a request; optional per-call streaming options override defaults. */
  post: (data: AIData, options?: InitOptions) => Promise<void>
}
export const useAIStore = create<AIState>((set, get) => ({
ai: localStorage.getItem('ai') || 'qwen',
options: AI_OPTIONS,
message: null,
messages: [],
isLoading: false,
collapsed: false,
......@@ -25,40 +27,52 @@ export const useAIStore = create<AIState>((set, get) => ({
toggleCollapsed: () => {
set((state) => ({ collapsed: !state.collapsed }))
},
post: async (data) => {
post: async (data, options) => {
const { ai, messages } = get()
// 处理用户消息(去掉 system 角色的消息)
const userMessages = data.messages.filter((item) => item.role !== 'system')
set({
collapsed: true,
isLoading: true,
messages: [...messages, ...data.messages.filter((item) => item.role !== 'system')],
messages: [...messages, ...userMessages],
})
try {
await aiService.post(ai, data, {
onUpdate: (response) => {
onMessage: (response) => {
set((state) => {
const messageIndex = state.messages.findIndex((msg) => msg.id === response.id)
if (messageIndex === -1) {
// 新的 AI 回复
const newMessage: AIMessage = { id: response.id, role: 'assistant', content: response.content }
return {
messages: [...state.messages, { id: response.id, role: 'assistant', content: response.content }],
message: newMessage, // 存储最新的 AI 消息
messages: [...state.messages, newMessage], // 追加到历史消息
}
} else {
return {
messages: state.messages.map((msg) =>
// 追加内容到已有的消息
const updatedMessages = state.messages.map((msg) =>
msg.id === response.id ? { ...msg, content: msg.content + response.content } : msg
),
)
return {
message: updatedMessages[messageIndex], // 更新最新的 AI 消息
messages: updatedMessages,
}
}
})
},
onError: (err) => {
onerror: (err) => {
console.error('AI 请求失败:', err)
set({ isLoading: false })
},
onComplete: () => {
onclose: () => {
set({ isLoading: false })
},
...options,
})
} catch (err) {
console.error('AI 请求失败:', err)
......
import md5 from 'blueimp-md5'
import axios from 'axios'
import { fetchEventSource } from '@fortaine/fetch-event-source'
import { fetchEventSource, FetchEventSourceInit } from '@fortaine/fetch-event-source'
export interface AIOption {
label: string
......@@ -20,15 +20,12 @@ export interface AIData {
}
/** One streamed chunk of an AI reply. */
export interface AIResponse {
  /** Provider-assigned message/conversation id used to group chunks into one reply. */
  id?: string
  /** NOTE(review): not set by any provider in this file — confirm before relying on it. */
  isStream?: boolean
  /** Incremental text content of this chunk. */
  content: string
}
export interface AIStreamHandlers {
onUpdate: (response: AIResponse) => void
onError: (error: any) => void
onComplete?: () => void
export interface InitOptions extends FetchEventSourceInit {
onMessage?: (message: AIResponse) => void
}
// Available AI options for different implementations
......@@ -39,7 +36,6 @@ export const AI_OPTIONS: AIOption[] = [
// { label: '天工', value: 'tiangong' },
]
// Individual AI service functions
export async function getYiyanAccessToken() {
const AK = 'wY7bvMpkWeZbDVq9w3EDvpjU'
const SK = 'XJwpiJWxs5HXkOtbo6tQrvYPZFJAWdAy'
......@@ -49,184 +45,113 @@ export async function getYiyanAccessToken() {
return resp.data.access_token
}
/**
 * Thin wrapper around fetchEventSource applying the defaults shared by all
 * AI providers: POST, JSON content type, and an onopen guard that rejects
 * non-OK responses. Caller options are spread last, so any default can be
 * overridden per call.
 */
export async function fetchAIEventSource(url: string, options: FetchEventSourceInit) {
  const defaults: FetchEventSourceInit = {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    async onopen(response) {
      // Fail fast on HTTP errors instead of parsing an error body as SSE.
      if (!response.ok) throw response
    },
  }
  await fetchEventSource(url, { ...defaults, ...options })
}
// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Fm2vrveyu
export async function yiyan(data: AIData, handlers: AIStreamHandlers): Promise<void> {
export async function yiyan(data: AIData, options: InitOptions): Promise<void> {
const accessToken = await getYiyanAccessToken()
const params = { stream: true, ...data }
await fetchEventSource(
await fetchAIEventSource(
`/api/qianfan/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant?access_token=${accessToken}`,
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
...options,
body: JSON.stringify(params),
async onopen(response) {
if (response.ok) {
return
} else {
throw response
}
},
onmessage(res) {
if (res.data === '[DONE]') {
handlers.onComplete?.()
return
}
try {
const message = JSON.parse(res.data)
handlers.onUpdate({ id: message.id, content: message.result || '' })
if (options.onMessage) options.onMessage({ id: message.id, content: message.result || '' } as any)
} catch (error) {
console.error(error)
}
},
onerror(err) {
handlers.onError(err)
},
onclose() {
if (handlers.onComplete) {
handlers.onComplete()
}
},
}
)
}
// https://api-docs.deepseek.com/zh-cn/api/create-chat-completion
export async function deepseek(data: AIData, handlers: AIStreamHandlers): Promise<void> {
export async function deepseek(data: AIData, options: InitOptions): Promise<void> {
const apiKey = 'sk-f1a6f0a7013241de8393cb2cb108e777'
const params = { model: 'deepseek-reasoner', stream: true, ...data }
await fetchEventSource('/api/deepseek/chat/completions', {
method: 'POST',
await fetchAIEventSource('/api/deepseek/chat/completions', {
...options,
headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${apiKey}` },
body: JSON.stringify(params),
async onopen(response) {
if (response.ok) {
return
} else {
throw response
}
},
onmessage(res) {
if (res.data === '[DONE]') {
handlers.onComplete?.()
return
}
if (res.data === '[DONE]') return
try {
const message = JSON.parse(res.data)
if (message.choices && message.choices.length > 0) {
handlers.onUpdate({
id: message.id,
content: message.choices[0].delta?.content || '',
})
if (message.choices && message.choices.length > 0 && options.onMessage) {
options.onMessage({ id: message.id, content: message.choices[0].delta?.content || '' } as any)
}
} catch (error) {
console.error(error)
}
},
onerror(err) {
handlers.onError(err)
},
onclose() {
if (handlers.onComplete) {
handlers.onComplete()
}
},
})
}
// https://docs.siliconflow.cn/cn/api-reference/chat-completions/chat-completions
export async function siliconflow(data: AIData, handlers: AIStreamHandlers): Promise<void> {
export async function siliconflow(data: AIData, options: InitOptions): Promise<void> {
const apiKey = 'sk-bivnwauskdbvpspvmdorrgkrpwlyfxbfcezqsfsevowzubdj'
const params = { model: 'deepseek-ai/DeepSeek-R1', stream: true, ...data }
await fetchEventSource('/api/siliconflow/v1/chat/completions', {
method: 'POST',
await fetchAIEventSource('/api/siliconflow/v1/chat/completions', {
...options,
headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${apiKey}` },
body: JSON.stringify(params),
async onopen(response) {
if (response.ok) {
return
} else {
throw response
}
},
onmessage(res) {
if (res.data === '[DONE]') {
handlers.onComplete?.()
return
}
if (res.data === '[DONE]') return
try {
const message = JSON.parse(res.data)
if (message.choices && message.choices.length > 0) {
handlers.onUpdate({
id: message.id,
content: message.choices[0].delta?.content || '',
})
if (message.choices && message.choices.length > 0 && options.onMessage) {
options.onMessage({ id: message.id, content: message.choices[0].delta?.content || '' } as any)
}
} catch (error) {
console.error(error)
}
},
onerror(err) {
handlers.onError(err)
},
onclose() {
if (handlers.onComplete) {
handlers.onComplete()
}
},
})
}
// https://help.aliyun.com/zh/model-studio/developer-reference/use-qwen-by-calling-api
export async function qwen(data: AIData, handlers: AIStreamHandlers): Promise<void> {
export async function qwen(data: AIData, options: InitOptions): Promise<void> {
const apiKey = 'sk-afd0fcdb53bf4058b2068b8548820150'
const params = { model: 'qwen-max-latest', stream: true, ...data }
await fetchEventSource('/api/qwen/compatible-mode/v1/chat/completions', {
method: 'POST',
await fetchAIEventSource('/api/qwen/compatible-mode/v1/chat/completions', {
...options,
headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${apiKey}` },
body: JSON.stringify(params),
async onopen(response) {
if (response.ok) {
return
} else {
throw response
}
},
onmessage(res) {
if (res.data === '[DONE]') {
handlers.onComplete?.()
return
}
if (res.data === '[DONE]') return
try {
const message = JSON.parse(res.data)
if (message.choices && message.choices.length > 0) {
handlers.onUpdate({
id: message.id,
content: message.choices[0].delta?.content || '',
})
if (message.choices && message.choices.length > 0 && options.onMessage) {
options.onMessage({ id: message.id, content: message.choices[0].delta?.content || '' } as any)
}
} catch (error) {
console.error(error)
}
},
onerror(err) {
handlers.onError(err)
},
onclose() {
if (handlers.onComplete) {
handlers.onComplete()
}
},
})
}
export async function tiangong(data: AIData, handlers: AIStreamHandlers): Promise<void> {
export async function tiangong(data: AIData, options: InitOptions): Promise<void> {
const appKey = 'a8701b73637562d33a53c668a90ee3be'
const appSecret = 'e191593f486bb88a39c634f46926762dddc97b9082e192af'
const timestamp = Math.floor(Date.now() / 1000).toString()
const sign = md5(`${appKey}${appSecret}${timestamp}`)
await fetchEventSource('/api/tiangong/sky-saas-writing/api/v1/chat', {
method: 'POST',
await fetchAIEventSource('/api/tiangong/sky-saas-writing/api/v1/chat', {
...options,
headers: {
'Content-Type': 'application/json',
app_key: appKey,
......@@ -234,41 +159,19 @@ export async function tiangong(data: AIData, handlers: AIStreamHandlers): Promis
timestamp,
stream: 'true',
},
body: JSON.stringify({
chat_history: data.messages,
stream_resp_type: 'delta',
}),
async onopen(response) {
if (response.ok) {
return
} else {
throw response
}
},
body: JSON.stringify({ chat_history: data.messages, stream_resp_type: 'delta' }),
onmessage(res) {
if (res.data === '[DONE]') return
try {
const message = JSON.parse(res.data)
if (message.type !== 1) return
const messageId = message.conversation_id
const messageContent = message?.arguments?.[0]?.messages?.[0]?.text || ''
handlers.onUpdate({
id: messageId,
content: messageContent,
})
if (options.onMessage) options.onMessage({ id: messageId, content: messageContent } as any)
} catch (error) {
console.error(error)
}
},
onerror(err) {
handlers.onError(err)
},
onclose() {
if (handlers.onComplete) {
handlers.onComplete()
}
},
})
}
......@@ -280,7 +183,7 @@ const aiService = {
qwen,
tiangong,
async post(type: string, data: AIData, handlers: AIStreamHandlers): Promise<void> {
async post(type: string, data: AIData, options: InitOptions): Promise<void> {
const messages: AIMessage[] = []
const dataset = localStorage.getItem('dataset')
if (dataset) {
......@@ -290,19 +193,19 @@ const aiService = {
data.messages = [...messages, ...data.messages]
switch (type) {
case 'yiyan':
return yiyan(data, handlers)
return yiyan(data, options)
case 'deepseek':
return deepseek(data, handlers)
return deepseek(data, options)
case 'siliconflow':
return siliconflow(data, handlers)
return siliconflow(data, options)
case 'qwen':
return qwen(data, handlers)
return qwen(data, options)
case 'tiangong':
return tiangong(data, handlers)
return tiangong(data, options)
default:
throw new Error(`未找到对应的 AI 配置: ${type}`)
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论