@@ -1,256 +0,0 @@
-import { useEffect, useState } from "react";
-import TextInputBar from "./TextInputBar";
-import VoiceInputBar from "./VoiceInputBar";
-import { textChat } from "@/service/bot";
-import { useTextChat } from "@/store/textChat";
-import { TAgentDetail } from "@/types/agent";
-import { delay, getLoginId, isSuccess } from "@/utils";
-import {
-  useAudioPlayer
-} from "@/utils/audio";
-
-import Taro, { useUnload } from "@tarojs/taro";
-import { EChatRole, EContentType, TRobotMessage } from "@/types/bot";
-
-import { usePostMessage, saveRobotContentToServer } from './message'
-
-import { getRecommendPrompt } from "@/service/bot"
-
-
-interface Props {
-  agent: TAgentDetail | null;
-  setShowWelcome?: (b: boolean) => void;
-  setIsVoice?: (b: boolean) => void;
-  setDisabled?: (b: boolean) => void;
-}
-
-let stopReceiveChunk: (() => void) | undefined;
-export const useChatInput = ({ agent, setShowWelcome, setDisabled }: Props) => {
-  const {
-    pushRobotMessage,
-    updateRobotMessage,
-    getCurrentRobotMessage,
-    updateRobotReasoningMessage,
-    pushMessage,
-    updateMessage,
-    deleteMessage,
-    setQuestions,
-    questions,
-  } = useTextChat();
-  const { startTimedMessage, stopTimedMessage, saveMessageToServer } = usePostMessage(getCurrentRobotMessage);
-
-  const {
-    setFistChunk,
-    pushBase64ToQuene,
-    playChunk,
-  } = useAudioPlayer()
-
-  let myMsgUk = '';
-  let mySessionId = '';
-
-
-  const chatWithGpt = async (message: string, sessionId: string, msgUk: string) => {
-    setShowWelcome?.(false)
-    setQuestions([])
-    let currentRobotMsgUk = "";
-    await delay(300);
-    setDisabled?.(true);
-    if (!agent?.agentId) {
-      return;
-    }
-    const loginId = getLoginId();
-    if (!loginId) {
-      return;
-    }
-
-    // const greeting = "Welcome to my agent. What would you like to ask?";
-    // {
-    //   content: greeting,
-    //   contentType: EContentType.TextPlain,
-    //   role: EChatRole.System,
-    // },
-
-    const newMsg = {
-      content: message,
-      contentType: EContentType.TextPlain,
-      role: EChatRole.User,
-    }
-    // Wait for the outgoing message to finish being reported to the server
-    const myMsgResponse = await saveMessageToServer({
-      loginId,
-      messages: [{
-        ...newMsg,
-        saveStatus: 2,
-        isStreaming: false,
-        msgUk,
-      }],
-      agentId: agent.agentId,
-      sessionId,
-    })
-
-    if (!isSuccess(myMsgResponse.status)) {
-      return setDisabled?.(false);
-    }
-    let isFirstChunk = true
-
-    // Kick off the text chat
-    stopReceiveChunk = textChat({
-      params: {
-        agentId: agent.agentId,
-        isEnableOutputAudioStream: true,
-        isEnableSearch: false,
-        isEnableThinking: false,
-        loginId,
-        messages: [newMsg],
-        sessionId,
-      },
-      onStart: () => {
-        // Push an empty reply so the UI has a bubble to render into
-        const blankMessage = {
-          role: EChatRole.Assistant,
-          contentType: EContentType.TextPlain,
-          saveStatus: 0,
-          content: "",
-          reasoningContent: "",
-          isStreaming: false,
-          msgUk: currentRobotMsgUk,
-          dislikeReason: '',
-          robot: {
-            avatar: agent?.avatarUrl ?? "",
-            name: agent?.name ?? "",
-            agentId: agent?.agentId ?? "",
-          },
-        }
-        currentRobotMsgUk = pushRobotMessage(blankMessage);
-        // Stop any previous timer first
-        stopTimedMessage()
-        // Start the timed save, posting once every 5 seconds
-        startTimedMessage({
-          loginId,
-          messages: [{
-            ...blankMessage,
-          }],
-          agentId: agent.agentId ?? '',
-          sessionId,
-        }, 5000);
-        isFirstChunk = true
-      },
-      onReceived: (m) => {
-        // console.log("received:", m);
-        if (m.reasoningContent) {
-          updateRobotReasoningMessage(
-            currentRobotMsgUk,
-            m.reasoningContent,
-            m.body,
-          );
-        } else {
-          updateRobotMessage(m.content, m.body);
-          // pushBase64ToQuene(m.body.content.audio)
-          if (isFirstChunk) {
-            isFirstChunk = false
-            setFistChunk(m.body.content.audio)
-          } else {
-            pushBase64ToQuene(m.body.content.audio)
-          }
-
-          playChunk();
-        }
-      },
-      onFinished: async () => {
-        const currentRobotMessage = getCurrentRobotMessage();
-        console.log("Reply finished, current robot message: ", currentRobotMessage);
-
-        stopTimedMessage()
-        if (!agent.agentId) {
-          return
-        }
-        setDisabled?.(false);
-        // If no answer came back at all, show a fallback message
-        if (!currentRobotMessage?.content?.length) {
-          updateRobotMessage("The server is busy...");
-          return
-        }
-
-        // Save the agent's answer to the server
-        // currentRobotMessage.content holds the full text of the agent's reply so far
-        const content = currentRobotMessage.content as string
-        updateRobotMessage(content, currentRobotMessage?.body, 2, true)
-
-        saveRobotContentToServer(currentRobotMessage, loginId, agent.agentId, sessionId)
-
-
-        const response = await getRecommendPrompt({
-          agentId: agent.agentId,
-          sessionId,
-        })
-        // todo: if the user sends quickly, discard the earlier answers and set questions based on currentRobotMessage.msgUk
-        if (isSuccess(response.status)) {
-          setQuestions(response.data.questions)
-        }
-
-      },
-      onComplete: async () => {
-        stopTimedMessage()
-        console.log('Reply onComplete')
-        // In case the server never sends a terminating message, force one more save when the request ends so the timed save does not miss the last part of the message body
-        const currentRobotMessage = getCurrentRobotMessage();
-        if (currentRobotMessage && agent.agentId) {
-          saveRobotContentToServer(currentRobotMessage, loginId, agent.agentId, sessionId)
-        }
-
-        setDisabled?.(false);
-
-        isFirstChunk = true
-      },
-      onError: () => {
-        setDisabled?.(false);
-        deleteMessage(currentRobotMsgUk);
-      },
-    });
-  };
-  const handleVoiceSend = (message: string) => {
-    updateMessage(message, myMsgUk);
-    chatWithGpt(message, mySessionId, myMsgUk);
-  };
-  const handleOnSend = async (message: string) => {
-    if (!agent?.agentId) {
-      return
-    }
-    const { sessionId, msgUk } = pushMessage(message);
-    chatWithGpt(message, sessionId, msgUk);
-  };
-
-  // Push an empty bubble for the user's own message before sending
-  const handleBeforeSend = () => {
-    if (!agent?.agentId) {
-      return
-    }
-    const { sessionId, msgUk } = pushMessage("");
-    myMsgUk = msgUk
-    mySessionId = sessionId
-  };
-
-  // When voice recognition fails, delete the bubble just pushed for the user
-  const handleVoiceError = () => {
-    deleteMessage(myMsgUk);
-  };
-  useEffect(() => {
-    return () => {
-      stopTimedMessage();
-    }
-  }, [])
-  useUnload(() => {
-    if (stopReceiveChunk) {
-      stopReceiveChunk();
-      stopTimedMessage();
-    }
-  });
-  return {
-    setQuestions,
-    handleVoiceSend,
-    handleOnSend,
-    questions,
-    handleBeforeSend,
-    handleVoiceError,
-  }
-}
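
For reference, here is a minimal sketch of how a screen could have consumed this hook before the file was removed. The component name, the import path, the prop names on TextInputBar and VoiceInputBar, and the assumption that `questions` is a string array are all illustrative; only the hook's Props shape and returned handlers come from the deleted file above.

```tsx
// Hypothetical consumer of useChatInput. TextInputBar/VoiceInputBar prop names are
// assumed; only the hook's own API (Props in, handlers and questions out) is taken
// from the deleted file.
import { useState } from "react";
import { View } from "@tarojs/components";
import TextInputBar from "./TextInputBar";
import VoiceInputBar from "./VoiceInputBar";
import { TAgentDetail } from "@/types/agent";
import { useChatInput } from "./useChatInput";

interface ChatFooterProps {
  agent: TAgentDetail | null; // loaded elsewhere, e.g. from an agent-detail request
}

export default function ChatFooter({ agent }: ChatFooterProps) {
  const [showWelcome, setShowWelcome] = useState(true);
  const [isVoice, setIsVoice] = useState(false);
  const [disabled, setDisabled] = useState(false);

  const {
    handleOnSend,     // text: pushes the user bubble itself, then streams the reply
    handleBeforeSend, // voice: push an empty user bubble before recognition starts
    handleVoiceSend,  // voice: fill that bubble with the recognized text and send
    handleVoiceError, // voice: drop the empty bubble when recognition fails
    questions,        // recommended follow-up prompts fetched after each reply
  } = useChatInput({ agent, setShowWelcome, setIsVoice, setDisabled });

  return (
    <View>
      {showWelcome && <View>Ask the agent anything to get started.</View>}
      {questions.map((q) => (
        <View key={q} onClick={() => handleOnSend(q)}>{q}</View>
      ))}
      {isVoice ? (
        <VoiceInputBar
          disabled={disabled}
          onBeforeSend={handleBeforeSend}
          onSend={handleVoiceSend}
          onError={handleVoiceError}
        />
      ) : (
        <TextInputBar disabled={disabled} onSend={handleOnSend} />
      )}
    </View>
  );
}
```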