
feat: update speech-to-text

sunxiao, 1 day ago
Commit 1c2672c861

+ 1 - 1
src/components/RecodeItem/index.vue

@@ -98,7 +98,7 @@ const handleEmitParent = () => {
         <view class="status">{{ warningType?.label }}</view>
       </view>
       <view class="right">
-        <view class="btn">立即处理</view>
+        <view class="btn">立即查看</view>
         <uni-icons type="right" size="12" color="#2454FF"></uni-icons>
       </view>
     </view>

+ 71 - 132
src/components/chat/ChatInput.vue

@@ -1,8 +1,7 @@
 <script setup>
-// const plugin = requirePlugin("WechatSI")
-
 import { ref, unref, onMounted } from 'vue';
-import { baseURL } from '@/utils/https';
+
+const plugin = requirePlugin("WechatSI")
 
 const modelInpValue = defineModel();
 const emit = defineEmits(['on-submit']);
@@ -11,17 +10,10 @@ const modelLoading = defineModel('loading');
 const isRecording = ref(false);
 const recordingTip = ref('松开 发送');
 const showVolume = ref(false);
-const volumeLevel = ref(50); // simulated volume level
-let recorderManager = null;
-const audioPath = ref(null);
+const isLoad = ref(false);
 
 // real-time speech recognition
-// const manager = plugin.getRecordRecognitionManager();
-
-
-console.log( "manager", manager );
-
-const innerAudioContext = wx.createInnerAudioContext();
+const manager = plugin.getRecordRecognitionManager();
 
 // type: input|voice
 const inpType = ref('input');
@@ -51,147 +43,84 @@ const onChangeInpType = () => {
   inpType.value = inpType.value === 'input' ? 'voice' : 'input';
 }
 
-const uploadAudio = (filePath) => {
-  uni.showLoading({
-    title: '上传中...'
-  });
-  uni.uploadFile({
-    url: 'https://your-server.com/upload',
-    filePath: filePath,
-    name: 'audio', // field name the backend expects for the file
-    formData: {
-      // extra form data can be attached here
-      'userId': '123',
-      'timestamp': new Date().getTime()
-    },
-    success: (uploadRes) => {
-      uni.hideLoading();
-      console.log('上传成功', uploadRes);
-      // handle the data returned by the server
-      const data = JSON.parse(uploadRes.data);
-      uni.showToast({
-        title: '上传成功',
-        icon: 'success'
-      });
-    },
-    fail: (error) => {
-      uni.hideLoading();
-      console.log('上传失败', error);
-      uni.showToast({
-        title: '上传失败',
-        icon: 'none'
-      });
+const addManagerEventListener = () => {
+  manager.onStop = (res) => {
+    uni.hideLoading();
+    if (!res.result) {
+      return uni.showToast({ title: '未识别到讲话内容', duration: 3000, icon: 'none' });
     }
-  });
-}
-
-
-const playRecording = () => {
 
-  if (innerAudioContext) {
-    innerAudioContext.src = audioPath.value;
-    innerAudioContext.play();
+    modelInpValue.value = res.result;
 
-    innerAudioContext.onPlay(() => {
-      console.log('开始播放');
-    });
-
-    innerAudioContext.onError((err) => {
-      console.error('播放错误:', err);
-      uni.showToast({ title: '播放失败', icon: 'none' });
-    });
+    onSubmit();
+  }
+  
+  manager.onStart =  (res) => {
+    isRecording.value = true;
+    showVolume.value = true;
+    console.log("成功开始录音识别", res)
+  }
+  
+  manager.onError = (res) => {
+    uni.hideLoading();
+
+    isRecording.value = false;
+    showVolume.value = false;
+    
+    uni.showToast({ title: '语音识别失败', duration: 3000, icon: 'none' });
+    console.error("error msg", res.msg)
   }
 }
 
-// voice playback related
-const startRecording = e => {
-  isRecording.value = true;
-  showVolume.value = true;
-
-  recorderManager.start({
-    format: 'mp3',
-    duration: 60000, // max 1 minute
-    sampleRate: 44100,
-    numberOfChannels: 1,
-    encodeBitRate: 192000,
-    frameSize: 50, // frame size; affects onFrameRecorded callback frequency
-  });
-}
-
-function setupRecorderEvents() {
-  if (!recorderManager) return;
-
+// start recording
+const startRecording = () => {
+  if ( modelLoading.value ) {
+    return uni.showToast({ title: '当前有会话进行中', duration: 3000, icon: 'none' });
+  };
+  
   uni.authorize({
     scope: 'scope.record',
     success() {
-      recorderManager.onStart(() => {
-        console.log('录音开始');
-      });
-
-      recorderManager.onPause(() => {
-        console.log('录音暂停');
-      });
-
-      recorderManager.onStop((res) => {
-
-        audioPath.value = res.tempFilePath;
-
-        // setTimeout(() => {
-        //   innerAudioContext.src = 'https://dlink.host/musics/aHR0cHM6Ly9vbmVkcnYtbXkuc2hhcmVwb2ludC5jb20vOnU6L2cvcGVyc29uYWwvc3Rvcl9vbmVkcnZfb25taWNyb3NvZnRfY29tL0VjYzBzQUxiWFk5TWdHQl9GUVNkV2pJQm5wRmM0MktDZWpURnhhMjhELUdXeVE.mp3';
-        //   innerAudioContext.play();
-        // }, 1000)
-
-        console.log('录音停止', res);
-        // the recording result can be handled here, e.g. uploaded or played back
-      });
-      recorderManager.onFrameRecorded((res) => {
-        // real-time volume info can be read here
-        const volume = res.volumn; // note: WeChat may report 'volumn' rather than 'volume'
-        if (volume !== undefined) {
-          volumeLevel.value = Math.min(100, volume * 200); // scale the volume for display
-        }
-      });
+      manager.start({ lang: "zh_CN", duration: 60000 })
     },
     fail() {
-      uni.showToast({ title: '用户拒绝授权录音权限', icon: 'none' });
-    },
+      uni.showModal({
+        title: '提示',
+        content: '需要麦克风权限'
+      });
+    }
   });
-
 }
 
 function stopRecording() {
   if (!isRecording.value) return;
-
   isRecording.value = false;
   showVolume.value = false;
-
-  recorderManager.stop();
-
-  // stop volume detection
-  // stopVolumeDetection();
+  uni.showLoading({
+    title: "识别中...",
+    mask: true
+  })
+  manager.stop();
 }
 
-// initialize the recorder manager
 onMounted(() => {
-  recorderManager = wx.getRecorderManager();
-
-  setupRecorderEvents();
-
-
+  addManagerEventListener();
+  // TODO: add a proper loading state later
+  setTimeout(() => {
+    isLoad.value = true;
+  }, 1500)
 });
+
 </script>
 
 <template>
+  <!-- TODO: extract this into a separate component later -->
   <view class="voice-wrapper" v-show="isRecording">
     <view class="voice-inner">
       <view class="voice-card">
         <view class="voice-tip">
           <view class="la-line-scale-pulse-out">
-            <view></view>
-            <view></view>
-            <view></view>
-            <view></view>
-            <view></view>
+            <view v-for="item in 5" :key="item"></view>
           </view>
         </view>
       </view>
@@ -206,15 +135,23 @@ onMounted(() => {
   </view>
 
   <view class="chat-inp-container">
-    <!-- <view @click="playRecording">按钮</view> -->
     <view class="chat-inp-inner">
       <view class="voice-btn" @click="onChangeInpType">
         <TheSvgIcon class="icon" src="icon-voice" size="42"></TheSvgIcon>
       </view>
       <view class="inp-inner">
-        <textarea v-model.trim="modelInpValue" :show-confirm-bar="false" :cursor-spacing="30" auto-height
-          :maxlength="2000" class="chat-inp" placeholder="输入您的问题或需求" placeholder-style="color:#9A9A9A"
-          v-show="inpType == 'input'">
+        <textarea
+          v-model.trim="modelInpValue"
+          :show-confirm-bar="false"
+          :cursor-spacing="30"
+          :maxlength="2000"
+          :style="{ maxHeight: !isLoad ? '56rpx' : '200rpx' }"
+          auto-height
+          class="chat-inp"
+          placeholder="输入您的问题或需求"
+          placeholder-style="color:#9A9A9A"
+          v-show="inpType == 'input'"
+        >
         </textarea>
 
         <view class="chat-voice" v-show="inpType == 'voice'" @touchstart="startRecording" @touchend="stopRecording"
@@ -253,17 +190,16 @@ onMounted(() => {
     }
 
     .inp-inner {
-      width: 100%;
-      min-height: 56rpx;
       display: flex;
       align-items: center;
-      // padding-bottom: 10rpx;
+      width: 100%;
+      min-height: 56rpx;
+      padding: 0 16rpx;
 
       .chat-inp {
         width: 100%;
-        max-height: 200rpx;
         height: 100%;
-        padding: 0 16rpx;
+        // height: 28rpx;
         font-size: 28rpx;
         color: #333;
         box-sizing: border-box;
@@ -271,7 +207,10 @@ onMounted(() => {
 
       .chat-voice {
         width: 100%;
+        height: 100%;
+        padding: 12rpx 0;
         font-size: 28rpx;
+        font-weight: bold;
         text-align: center;
       }
     }
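
The rewritten ChatInput.vue follows the usual WechatSI record-recognition flow: register the onStart / onStop / onError handlers once, call start() on touchstart and stop() on touchend, and read the recognized text from onStop. A minimal standalone sketch of that flow (outside the Vue component; handleResult is a hypothetical callback, not part of the commit):

// Sketch of the WechatSI record-recognition flow used in ChatInput.vue.
// Assumes the WechatSI plugin is declared in manifest.json (see below); handleResult is hypothetical.
const plugin = requirePlugin("WechatSI");
const manager = plugin.getRecordRecognitionManager();

function initRecognition(handleResult) {
  manager.onStart = () => console.log("recognition started");
  manager.onStop = (res) => {
    // res.result carries the recognized text, res.tempFilePath the recorded audio
    if (res.result) handleResult(res.result);
  };
  manager.onError = (res) => console.error("recognition error", res.retcode, res.msg);
}

// bind these to touchstart / touchend on the voice button
const beginRecognition = () => manager.start({ lang: "zh_CN", duration: 60000 });
const endRecognition = () => manager.stop();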

+ 4 - 0
src/components/chat/ChatTaskGroup.vue

@@ -33,16 +33,20 @@ const handleClick = (item, index) => {
 <style lang="scss" scoped>
 .task-group {
   display: flex;
+  align-items: center;
+  justify-content: center;
   width: 100%;
   height: 78rpx;
   padding: 0 60rpx 0 60rpx;
   overflow: hidden;
+  box-sizing: border-box;
 
   .task-btn {
     display: flex;
     align-items: center;
     justify-content: center;
     width: 200rpx;
+    // width: 50%;
     height: 76rpx;
     border-radius: 8px;
     background: #FFF;

+ 3 - 5
src/components/layout/BasePublicLayout.vue

@@ -8,7 +8,6 @@ const scrollTop = ref(0);
 
 const scrollView = ref(null);
 
-const scrollIntoView = ref(null);
 const instance = getCurrentInstance();
 
 const props = defineProps({
@@ -38,8 +37,8 @@ const handleScroll = (e) => {
 
 // scroll to the bottom
 const scrollToBottom = () => {
-  scrollTop.value += 1;
-  nextTick(() => scrollTop.value = 999999);
+    scrollTop.value += 1;
+    nextTick(() => scrollTop.value = 999999);
 }
 
 // scroll to the bottom - 50px threshold
@@ -55,7 +54,6 @@ onMounted(() => {
   domQuery.exec(res => {
     const [{ height }] = res;
     scrollViewHeight.value = height;
-    console.log( "scrollViewHeight.value", scrollViewHeight.value );
   });
 })
 
@@ -75,7 +73,6 @@ defineExpose({
       class="scroll-view"
       scroll-y
       :scrollTop="scrollTop"
-      :scroll-into-view="scrollIntoView"
       @scroll="handleScroll"
     >
       <view class="scroll-content">
@@ -116,6 +113,7 @@ defineExpose({
     width: 100%;
     padding-top: 16rpx;
   }
+
   .safeBottom {
     padding-bottom: env(safe-area-inset-bottom);
   }

+ 4 - 1
src/components/layout/BaseStatusBar.vue

@@ -1,6 +1,9 @@
 <script setup>
-const { safeAreaInsets } = uni.getSystemInfoSync()
+
+const { safeArea: safeAreaInsets } = wx.getWindowInfo();
+
 </script>
+
 <template>
   <view class="status-bar" :style="{height: safeAreaInsets.top + 'px'}"></view>
 </template>
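
wx.getWindowInfo().safeArea reports the safe area in absolute pixels, so safeArea.top serves as the status-bar offset here, replacing the deprecated uni.getSystemInfoSync() call. If safeArea can ever be undefined (older base libraries), a defensive read is a reasonable sketch, assuming statusBarHeight as the fallback:

// Sketch: defensive safe-area read (assumption: fall back to statusBarHeight when safeArea is missing).
const windowInfo = wx.getWindowInfo();
const topInset = (windowInfo.safeArea && windowInfo.safeArea.top) || windowInfo.statusBarHeight || 0;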

+ 0 - 10
src/components/layout/TheStatusBar1.vue

@@ -1,10 +0,0 @@
-<script setup>
-const { safeAreaInsets } = uni.getSystemInfoSync()
-</script>
-<template>
-  <view class="status-bar" :style="{height: safeAreaInsets.top + 'px'}">
-
-
-    123123123
-  </view>
-</template>

+ 6 - 1
src/manifest.json

@@ -55,7 +55,12 @@
       "urlCheck": false
     },
     "usingComponents": true,
-    "requiredPermissions": ["scope.record"]
+    "plugins": {
+      "WechatSI": {
+        "version": "latest",
+        "provider": "wx069ba97219f66d99"
+      }
+    }
   },
   "mp-alipay": {
     "usingComponents": true

+ 10 - 7
src/pages/answer/index.vue

@@ -72,6 +72,7 @@ const onTaskClick = (item) => {
     selectedOption = item;
   } else {
     selectedOption = null;
+    inpValue.value = '';
   }
 }
 
@@ -139,7 +140,7 @@ const onRegenerate = ({ showVal, question, realQuestion, tools, uploadFileList }
         uploadFileList
       });
   
-      setTimeout(() => scrollRef.value.scrollToBottomIfAtBottom(), 100);
+      // setTimeout(() => scrollRef.value.scrollToBottomIfAtBottom(), 300);
     },
     onComplete: () => {
       isLoading.value = false;
@@ -157,12 +158,14 @@ const handleSubmit = async ({ showVal, question, selectedOption, realQuestion =
 
   isLoading.value = true;
 
-  const { data: sessionId } = await chatApi.getChatSessionTag();
-  currenSessionId.value = sessionId;
+  if ( !currenSessionId.value  ) {
+    const { data: sessionId } = await chatApi.getChatSessionTag();
+    currenSessionId.value = sessionId;
+  }
 
   addChat({
     id: '',
-    sessionId,
+    sessionId: currenSessionId.value,
     showVal,
     question,
     realQuestion,
@@ -223,7 +226,7 @@ const handleChatDetail = async ({ sessionId }) => {
 
   currenSessionId.value = sessionId;
 
-  setTimeout(() => scrollRef.value.scrollToBottom(), 100)
+  setTimeout(() => scrollRef.value.scrollToBottom(), 200)
 
 }
 
@@ -232,7 +235,7 @@ const getHelperList = async () => {
   const { data } = await chatApi.getHelperList();
   const result = getFormatYesterDay(data);
 
-  helperList.value = result.filter(({ tools }) => tools).map(item => ({
+  helperList.value = result.filter(({ tools }) => tools && tools != 'work_order').map(item => ({
     ...item,
     title: item.tools === 'work_order' ? '运行诊断' : item.title
   }));
@@ -280,7 +283,7 @@ defineExpose({
       </view>
 
       <view class="qa-container" v-if="chatDataSource.length">
-        <view class="qa-item" v-for="item, index in chatDataSource" :key="item.id">
+        <view class="qa-item" v-for="item, index in chatDataSource" :key="index">
           <ChatAsk :content="item.showVal" :sessionId="item.sessionId" :uploadFileList="item.uploadFileList"></ChatAsk>
           <ChatAnswer
             :id="item.id"

+ 1 - 1
src/uni_modules/zero-markdown-view/components/mp-html/node/node.vue

@@ -122,7 +122,7 @@ export default {
     return {
       ctrl: {},
       // #ifdef MP-WEIXIN
-      isiOS: uni.getSystemInfoSync().system.includes('iOS')
+      isiOS: uni.getDeviceInfo().system.includes('iOS')
       // #endif
     }
   },

+ 9 - 6
src/uni_modules/zero-markdown-view/components/mp-html/parser.js

@@ -75,12 +75,15 @@ const config = {
   }
 }
 const tagSelector={}
-const {
-  windowWidth,
-  // #ifdef MP-WEIXIN
-  system
-  // #endif
-} = uni.getSystemInfoSync()
+// const {
+//   windowWidth,
+//   // #ifdef MP-WEIXIN
+//   system
+//   // #endif
+// } = uni.getSystemInfoSync()
+
+const { system } = uni.getDeviceInfo();
+const { windowWidth } = uni.getWindowInfo();
 const blankChar = makeMap(' ,\r,\n,\t,\f')
 let idIndex = 0
 

+ 1 - 10
src/utils/streamRequest.js

@@ -6,14 +6,12 @@ const token = userStore.userInfo?.token;
 const baseURL = import.meta.env.VITE_APP_BASE_API;
 
 // stream request - chat
-export const streamChatRequest = async ({ data, onProgress, onSuccess, onComplete, onError, onAbort } ) => {
+export const streamChatRequest = async ({ data, onProgress, onSuccess, onComplete, onError } ) => {
 
   const url = baseURL + '/grpc/inferStreamRag'
   const Authorization = "Bearer " + token;
 
   let accumulatedText = "";
-  let isStopped = false;
-  let typedResult = "";
 
   const requestTask = uni.request({
     url,
@@ -25,24 +23,17 @@ export const streamChatRequest = async ({ data, onProgress, onSuccess, onComplet
     },
     data,
     success: () => {
-      console.log("success");
       onSuccess && onSuccess(accumulatedText);
     },
     fail: (err) => {
-      console.log("err", err);
       onError && onError(err);
     },
     complete: () => {
-
-    console.log("complete");
       onComplete && onComplete();
     }
   });
 
-  let fullMessage = "";
-
   requestTask.onChunkReceived(async (res) => {
-    console.log(res);
     const uint8Array = new Uint8Array(res.data);
     const chunkText = uint8ArrayToString(uint8Array);;
     accumulatedText += chunkText;
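
streamChatRequest consumes the response incrementally through uni.request's onChunkReceived callback and accumulates the decoded text before handing it to the callbacks. A trimmed sketch of that pattern (assuming enableChunked is set on the real request and that uint8ArrayToString is the module's own UTF-8 helper):

// Sketch: accumulate streamed chunks from a chunked uni.request.
function streamSketch({ url, token, data, onProgress, onSuccess, onError, onComplete }) {
  let accumulatedText = "";
  const task = uni.request({
    url,
    method: 'POST',
    header: { Authorization: 'Bearer ' + token },
    enableChunked: true,                // assumption: chunked transfer is enabled on the real request
    data,
    success: () => onSuccess && onSuccess(accumulatedText),
    fail: (err) => onError && onError(err),
    complete: () => onComplete && onComplete()
  });
  task.onChunkReceived((res) => {
    // res.data is an ArrayBuffer chunk; decode and append
    const chunkText = uint8ArrayToString(new Uint8Array(res.data));
    accumulatedText += chunkText;
    onProgress && onProgress(accumulatedText);
  });
  return task;
}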