bzkf3 2 år sedan
förälder
incheckning
5773a46d01

+ 2 - 0
src/components/chat-stu-card/index.vue

@@ -6,6 +6,8 @@ const showMsg = $computed(() => {
   switch (props.d.dxz_last_msg_type) {
     case '1': return decodeURIComponent(props.d.dxz_last_msg);
     case '2': return '[ 图片 ]';
+    case '3': return decodeURIComponent(props.d.dxz_last_msg);
+    case '4': return decodeURIComponent(props.d.dxz_last_msg);
   }
 })
 </script>

+ 183 - 56
src/components/rtc-dialog/index.vue

@@ -4,20 +4,32 @@ import type { IAgoraRTCClient, IMicrophoneAudioTrack, ICameraVideoTrack } from "
 import { CHAT_STATUS, CHAT_OPERATION } from '~/types';
 import { UseDraggable } from '@vueuse/components'
 import request from '~/utils/request';
-import type { type_dyaw_xlfw_zxhd } from '~/types';
+import type { type_dyaw_xlfw_zxhd, type_dyaw_xlfw_zxhd_log } from '~/types';
 import user, { UserRole } from '~/store/user';
 import { socketSend } from '~/utils/ws';
 
-// const props = defineProps<{
-//   mode: 'audio' | 'video'
-// }>()
+const emits = defineEmits<{
+  (event: 'update-info', info: type_dyaw_xlfw_zxhd_log): void;
+}>()
+
+let reqDate: Partial<type_dyaw_xlfw_zxhd_log>
+//   = {
+//   dxz_id: dyaw_xlfw_zxhd.dxz_id,
+//   dxzl_stu_user_id: dyaw_xlfw_zxhd.dxz_stu_user_id,
+//   dxzl_stu_user_realname: dyaw_xlfw_zxhd.dxz_stu_user_realname,
+//   dxzl_tea_user_id: dyaw_xlfw_zxhd.dxz_tea_user_id,
+//   dxzl_tea_user_realname: dyaw_xlfw_zxhd.dxz_tea_user_realname,
+//   dxzl_last_msg_content: encodeURIComponent(inputValue),
+//   dxzl_type: inputValue.includes('><img') ? '2' : '1'
+// }
 
 let dyaw_xlfw_zxhd = $ref<type_dyaw_xlfw_zxhd>()
+let mode = $ref<'audio' | 'video'>()
 
 const otherInfo = $computed(() => {
-  if (UserRole === 'teacher')
-    return { id: dyaw_xlfw_zxhd?.dxz_tea_user_id, realname: dyaw_xlfw_zxhd?.dxz_tea_user_realname, avatar: dyaw_xlfw_zxhd?.dxx_tea_avatar }
   if (UserRole === 'student')
+    return { id: dyaw_xlfw_zxhd?.dxz_tea_user_id, realname: dyaw_xlfw_zxhd?.dxz_tea_user_realname, avatar: dyaw_xlfw_zxhd?.dxx_tea_avatar }
+  if (UserRole === 'teacher')
     return { id: dyaw_xlfw_zxhd?.dxz_stu_user_id, realname: dyaw_xlfw_zxhd?.dxz_stu_user_realname, avatar: dyaw_xlfw_zxhd?.dxx_user_avatar }
 })
 
@@ -29,31 +41,71 @@ function handleClose() {
   isOpen = false
 }
 defineExpose({
-  open(d: type_dyaw_xlfw_zxhd, mode: 'audio' | 'video') {
+  init(ws: WebSocket) {
+    ws2 = ws
+  },
+  open(d: type_dyaw_xlfw_zxhd, _mode: 'audio' | 'video') {
+    if (isOpen) return;
     // isOpen = true
     dyaw_xlfw_zxhd = d
+    reqDate = {
+      dxz_id: dyaw_xlfw_zxhd.dxz_id,
+      dxzl_stu_user_id: dyaw_xlfw_zxhd.dxz_stu_user_id,
+      dxzl_stu_user_realname: dyaw_xlfw_zxhd.dxz_stu_user_realname,
+      dxzl_tea_user_id: dyaw_xlfw_zxhd.dxz_tea_user_id,
+      dxzl_tea_user_realname: dyaw_xlfw_zxhd.dxz_tea_user_realname,
+    }
+
+    mode = _mode
     handleAudioChatStart()
   },
   close() {
     handleClose()
   },
-  publisher(ws: WebSocket, operate: CHAT_OPERATION) {
-    ws2 = ws
+  publisher(
+    content:
+      // type_dyaw_xlfw_zxhd_log &
+      {
+        operate: CHAT_OPERATION,
+        rtcOptions?: TRtcOptions,
+        mode?: 'audio' | 'video',
+        dyaw_xlfw_zxhd?: type_dyaw_xlfw_zxhd,
+        fullSendData?: type_dyaw_xlfw_zxhd_log
+      }
+  ) {
+    console.log('publisher');
+    //
+    const { operate } = content
     switch (operate) {
       case CHAT_OPERATION.START:
         audioChatStatus = CHAT_STATUS.WAITING_YOU_ACCEPT
+        rtcOptions = content.rtcOptions!;
+        mode = content.mode!;
+        dyaw_xlfw_zxhd = content.dyaw_xlfw_zxhd!
+        reqDate = {
+          dxz_id: dyaw_xlfw_zxhd.dxz_id,
+          dxzl_stu_user_id: dyaw_xlfw_zxhd.dxz_stu_user_id,
+          dxzl_stu_user_realname: dyaw_xlfw_zxhd.dxz_stu_user_realname,
+          dxzl_tea_user_id: dyaw_xlfw_zxhd.dxz_tea_user_id,
+          dxzl_tea_user_realname: dyaw_xlfw_zxhd.dxz_tea_user_realname,
+        }
         isOpen = true
+        emits('update-info', content.fullSendData!)
+
         break;
       case CHAT_OPERATION.CANCEL:
+        // emits('update-info', content.fullSendData!)
         isOpen = false
         break;
       case CHAT_OPERATION.ACCEPT:
         audioChatStatus = CHAT_STATUS.CHATING
         break;
       case CHAT_OPERATION.DENY:
+        // emits('update-info', content.fullSendData!)
         isOpen = false
         break;
       case CHAT_OPERATION.END:
+        // emits('update-info', content.fullSendData!)
         isOpen = false
         break;
       default:
@@ -62,6 +114,42 @@ defineExpose({
   }
 })
 
+// Creates a new chat-log record for the call being started and notifies the
+// parent page via the 'update-info' emit so the message list updates live.
+// NOTE(review): the `tip` parameter is never used in the body — presumably it
+// was meant to be appended to dxzl_last_msg_content (cf. handleInfoEdit); confirm.
+function handleInfoAdd(tip: string) {
+  // Base payload; fields already captured in reqDate (ids / realnames) override these.
+  const _reqDate = Object.assign({
+    // dxzl_type '3'/'4' — appears to mean audio/video call respectively
+    // (matches the new '3'/'4' cases added in chat-stu-card/index.vue).
+    dxzl_type: mode === 'audio' ? '3' : '4',
+    dxzl_last_msg_content: encodeURIComponent(`【${mode === 'audio' ? '语音通话' : '视频通话'}】`),
+  }, reqDate)
+  return request({
+    url: '/dyaw/xlfw_zxhd_log/add',
+    data: {
+      dyaw_xlfw_zxhd_log: _reqDate
+    }
+  }).then(res => {
+    if (res.code === '1') {
+      // Echo the record locally, filling in server-assigned/derived fields.
+      const fullSendData = {
+        create_user_id: user.user_id,
+        // Unix time in seconds, derived by slicing the millisecond string.
+        create_dateline: Date.now().toString().slice(0, 10),
+        ..._reqDate,
+        dxzl_id: `${res.data.insert_id}`
+      } as type_dyaw_xlfw_zxhd_log
+      emits('update-info', fullSendData)
+      return fullSendData
+    }
+    // Implicitly resolves to undefined on failure — callers use fullSendData?.dxzl_id.
+  })
+}
+
+// Updates an existing chat-log record, e.g. to mark the call as
+// cancelled / denied / ended.
+// NOTE(review): when `tip` is provided it is used verbatim (not URI-encoded,
+// unlike handleInfoAdd), while the `??` fallback branch interpolates `tip`
+// at a point where it is necessarily undefined, yielding "…】 undefined" —
+// confirm which behavior is intended.
+// NOTE(review): the call sites in this diff invoke this as
+// handleInfoEdit('已取消') with a single argument, so the tip string lands in
+// `id` and `data` is undefined — verify against callers.
+function handleInfoEdit(id: string, data: Partial<type_dyaw_xlfw_zxhd_log>, tip?: string) {
+  const _reqDate = Object.assign({
+    dxzl_last_msg_content: tip ?? encodeURIComponent(`【${mode === 'audio' ? '语音通话' : '视频通话'}】 ${tip}`),
+  }, data)
+  return request({
+    url: '/dyaw/xlfw_zxhd_log/edit',
+    data: {
+      dxzl_id: id,
+      dyaw_xlfw_zxhd_log: _reqDate
+    }
+  })
+}
 
 const LocalPlayerContainerRef = $ref<HTMLElement>()
 const RemotePlayerContainerRef = $ref<HTMLElement>()
@@ -77,71 +165,106 @@ let rtcInstance: {
   localAudioTrack: undefined,
   localVideoTrack: undefined,
 }
+// Agora RTC connection parameters; filled either by the caller starting the
+// chat or received via the publisher() payload on the callee side.
+type TRtcOptions = {
+  appId: string;
+  channel: string;
+  token: string;
+}
+let rtcOptions: TRtcOptions;
+
+// Lazily creates the shared Agora RTC client and wires up auto-play of
+// remote tracks. Safe to call multiple times (no-op once the client exists).
+function initRtcClient() {
+  if (rtcInstance.client) return;
+  let client = rtcInstance.client = AgoraRTC.createClient({ mode: "rtc", codec: "vp8" });
+
+  // Subscribe to every remote publication and play it immediately.
+  client.on("user-published", async (user, mediaType) => {
+
+    await client.subscribe(user, mediaType);
+
+    if (mediaType === "audio") {
+      const audioTrack = user.audioTrack;
+      audioTrack?.play();
+    } else {
+      // Remote video renders into this dialog's remote player element.
+      const videoTrack = user.videoTrack;
+      videoTrack?.play(RemotePlayerContainerRef as HTMLElement);
+    }
+  });
+}
 
 
 async function handleAudioChatStart() {
   try {
-    await AgoraRTC.getMicrophones()
-    // await AgoraRTC.getCameras()
-    let rtcOptions;
+    audioChatStatus = CHAT_STATUS.WAITING_OTHERS_ACCEPT
+    let isBusy;
     await request({
-      url: '/dyaw/xlfw_zxhd/get_rtc_token',
+      url: '/dyaw/xlfw_zxhd/get_user_status',
       data: {
-        dxz_id: dyaw_xlfw_zxhd?.dxz_id
+        user_id: otherInfo?.id
       }
-    }).then(async res => {
+    }).then(res => {
       if (res.code === '1') {
-        let resp: { jgim_roomid: string; rtc_appid: string; rtc_token: string } = res.data.one_info
-        rtcOptions = {
-          appId: resp.rtc_appid,
-          // channel: resp.jgim_roomid,
-          channel: dyaw_xlfw_zxhd!.dxz_id,
-          token: resp.rtc_token,
-          uid: user.user_id
-        }
-
-        let client = rtcInstance.client = AgoraRTC.createClient({ mode: "rtc", codec: "vp8" });
-
-        client.on("user-published", async (user, mediaType) => {
-          // 发起订阅
-          await client.subscribe(user, mediaType);
-
-          // 如果订阅的是音频轨道
-          if (mediaType === "audio") {
-            const audioTrack = user.audioTrack;
-            // 自动播放音频
-            audioTrack?.play();
-          } else {
-            const videoTrack = user.videoTrack;
-            // 自动播放视频
-            videoTrack?.play(RemotePlayerContainerRef as HTMLElement);
-          }
-        });
-
-        await rtcInstance.client!.join(rtcOptions.appId, rtcOptions.channel, rtcOptions?.token, rtcOptions.uid);
-        rtcInstance.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack();
-        // rtcInstance.localVideoTrack = await AgoraRTC.createCameraVideoTrack();
-        await rtcInstance.client!.publish([rtcInstance.localAudioTrack]);
-        // rtcInstance.localVideoTrack.play(LocalPlayerContainerRef as HTMLElement);
+        isBusy = !!res.data.status
       }
     })
+    if (isBusy) {
+      // busy operation
+      audioChatStatus = CHAT_STATUS.WAITING_BUSY
+      setTimeout(() => {
+        handleClose()
+      }, 2000)
+      return
+    };
+    // await AgoraRTC.getMicrophones()
+    // await AgoraRTC.getCameras()
+    // await request({
+    //   url: '/dyaw/xlfw_zxhd/get_rtc_token',
+    //   data: {
+    //     dxz_id: dyaw_xlfw_zxhd?.dxz_id
+    //   }
+    // }).then(async res => {
+    //   if (res.code === '1') {
+    //     let resp: { jgim_roomid: string; rtc_appid: string; rtc_token: string } = res.data.one_info
+    //     rtcOptions = {
+    //       appId: resp.rtc_appid,
+    //       // channel: resp.jgim_roomid,
+    //       channel: dyaw_xlfw_zxhd!.dxz_id,
+    //       token: resp.rtc_token,
+    //       // uid: user.user_id
+    //     }
+
+    //     initRtcClient()
+
+    //     await rtcInstance.client!.join(rtcOptions.appId, rtcOptions.channel, rtcOptions?.token, /*rtcOptions.uid*/ user.user_id);
+    //     rtcInstance.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack();
+    //     await rtcInstance.client!.publish(rtcInstance.localAudioTrack);
+    //     // rtcInstance.localVideoTrack = await AgoraRTC.createCameraVideoTrack();
+    //     // rtcInstance.localVideoTrack.play(LocalPlayerContainerRef as HTMLElement);
+    //     // await rtcInstance.client!.publish(rtcInstance.localVideoTrack);
+    //   }
+    // })
+    const fullSendData = await handleInfoAdd('拨号中')
+
     isOpen = true
     socketSend(ws2, {
       dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
       dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
       operate: CHAT_OPERATION.START,
-      rtcOptions
+      mode,
+      rtcOptions,
+      dyaw_xlfw_zxhd,
+      dxzl_id: fullSendData?.dxzl_id
     })
   } catch (error) {
     console.error(error);
   }
 
 }
-function handleAudioChatCancel() {
+async function handleAudioChatCancel() {
+  const fullSendData = await handleInfoEdit('已取消')
   socketSend(ws2, {
     dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
     dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
-    operate: CHAT_OPERATION.CANCEL
+    operate: CHAT_OPERATION.CANCEL,
+    fullSendData
   })
   isOpen = false
 }
@@ -153,19 +276,23 @@ function handleAudioChatAccept() {
   })
   audioChatStatus = CHAT_STATUS.CHATING
 }
-function handleAudioChatDeny() {
+async function handleAudioChatDeny() {
+  const fullSendData = await handleInfoEdit('已拒接')
   socketSend(ws2, {
     dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
     dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
-    operate: CHAT_OPERATION.DENY
+    operate: CHAT_OPERATION.DENY,
+    fullSendData
   })
   isOpen = false
 }
-function handleAudioChatEnd() {
+async function handleAudioChatEnd() {
+  const fullSendData = await handleInfoEdit('已结束')
   socketSend(ws2, {
     dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
     dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
-    operate: CHAT_OPERATION.END
+    operate: CHAT_OPERATION.END,
+    fullSendData
   })
   isOpen = false
 }
@@ -198,8 +325,8 @@ function handleAudioChatEnd() {
           </div>
           <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
             v-show="audioChatStatus === CHAT_STATUS.WAITING_BUSY">
-            <div class="text-red-500">对方忙线中请等待</div>
-            <div class="text-red-500">当前排队:{{ 4 }}</div>
+            <div class="text-red-500">对方忙线中</div>
+            <div class="text-red-500">请稍后再试</div>
             <i:line-md:loading-alt-loop class="text-xl" />
           </div>
           <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"

+ 20 - 145
src/pages/student/consult.vue

@@ -1,11 +1,10 @@
 <script setup lang="ts">
-import AgoraRTC from "agora-rtc-sdk-ng"
-import type { IAgoraRTCClient, IMicrophoneAudioTrack, ICameraVideoTrack } from "agora-rtc-sdk-ng"
 import type { type_dyaw_xlfw_zxhd, type_dyaw_xlfw_zxhd_log } from '~/types';
 import user from '~/store/user';
 import { createSocket, socketSend } from '~/utils/ws';
 import type { TSocketRes } from '~/utils/ws';
 import { formatTimestamp } from '~/utils/time'
+import { CHAT_STATUS, CHAT_OPERATION } from '~/types';
 
 const router = useRouter()
 let teacher
@@ -161,105 +160,38 @@ watch(
 // chat audio/video
 // ==========
 
-let rtcInstance: {
-  client?: IAgoraRTCClient;
-  localAudioTrack?: IMicrophoneAudioTrack;
-  localVideoTrack?: ICameraVideoTrack
-} = {
-  client: undefined,
-  localAudioTrack: undefined,
-  localVideoTrack: undefined,
-}
+let RtcDialogRef = $ref<typeof import("~/components/rtc-dialog/index.vue")['default']>()
 
 const ws2 = createSocket(
   { teacher: teacher.user_id, student: '*' },
   {
     message(socketRes: TSocketRes<type_dyaw_xlfw_zxhd_log & { operate: CHAT_OPERATION }>) {
+      console.log('enter', socketRes);
       if (socketRes.from_client_name.endsWith('teacher')) {
-        // infoList.push(socketRes.content)
         if (socketRes.content.dxzl_stu_user_id === user.user_id) {
-          switch (socketRes.content.operate) {
-            case CHAT_OPERATION.START:
-              audioChatStatus = CHAT_STATUS.WAITING_YOU_ACCEPT
-              ChatAudioRef!.open()
-              break;
-            case CHAT_OPERATION.CANCEL:
-              ChatAudioRef!.close()
-              break;
-            case CHAT_OPERATION.ACCEPT:
-              audioChatStatus = CHAT_STATUS.CHATING
-              break;
-            case CHAT_OPERATION.DENY:
-              ChatAudioRef!.close()
-              break;
-            case CHAT_OPERATION.END:
-              ChatAudioRef!.close()
-              break;
-            default:
-              break;
-          }
+          RtcDialogRef!.publisher(socketRes.content)
         }
       }
     }
   }
 )
+onMounted(() => {
+  RtcDialogRef!.init(ws2)
+})
 
-enum CHAT_STATUS { 'WAITING_YOU_ACCEPT', 'WAITING_OTHERS_ACCEPT', 'WAITING_BUSY', 'CHATING' }
-enum CHAT_OPERATION { 'START', 'CANCEL', 'ACCEPT', 'DENY', 'END' }
-let audioChatStatus = $ref<CHAT_STATUS>(CHAT_STATUS.WAITING_OTHERS_ACCEPT)
-
-const ChatAudioRef = $ref<typeof import("~/components/chat-dialog/index.vue")['default']>()
-const ChatVideoRef = $ref<typeof import("~/components/chat-dialog/index.vue")['default']>()
 
-function handleAudioChatStart() {
-  ChatAudioRef!.open()
-  socketSend(ws2, {
-    dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
-    dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
-    operate: CHAT_OPERATION.START
-  })
-}
-function handleAudioChatCancel() {
-  socketSend(ws2, {
-    dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
-    dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
-    operate: CHAT_OPERATION.CANCEL
-  })
-  ChatAudioRef!.close()
-}
-function handleAudioChatAccept() {
-  socketSend(ws2, {
-    dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
-    dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
-    operate: CHAT_OPERATION.ACCEPT
-  })
-  // ChatAudioRef!.close()
-  audioChatStatus = CHAT_STATUS.CHATING
+async function handleAudioChatStart() {
+  RtcDialogRef!.open(dyaw_xlfw_zxhd, 'audio')
 }
-function handleAudioChatDeny() {
-  socketSend(ws2, {
-    dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
-    dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
-    operate: CHAT_OPERATION.DENY
-  })
-  ChatAudioRef!.close()
-}
-function handleAudioChatEnd() {
-  socketSend(ws2, {
-    dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
-    dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
-    operate: CHAT_OPERATION.END
-  })
-  ChatAudioRef!.close()
+
+async function handleVideoChatStart() {
+  RtcDialogRef!.open(dyaw_xlfw_zxhd, 'video')
 }
-function handleVideoChatStart() {
-  request({
-    url: '/dyaw/xlfw_zxhd/get_rtc_token',
-    data: {
-      dxz_id: dyaw_xlfw_zxhd?.dxz_id
-    }
-  })
-  // ChatVideoRef!.open()
+
+// Handler for the rtc-dialog 'update-info' emit: appends the pushed call-log
+// entry to the visible chat list when it belongs to the open conversation.
+// NOTE(review): the teacher page guards with dyaw_xlfw_zxhd?.dxz_id — confirm
+// dyaw_xlfw_zxhd is always defined here, otherwise this access throws.
+function emitUpdateInfo(info: type_dyaw_xlfw_zxhd_log) {
+  console.log('info :>> ', info);
+  if (info.dxz_id === dyaw_xlfw_zxhd.dxz_id)
+    infoList.push(info)
+}
 </script>
 
@@ -324,72 +256,15 @@ function handleVideoChatStart() {
   </el-dialog>
 
 
-  <chat-dialog ref="ChatAudioRef">
-    <div class="h-full flex_center flex-col text-light-50 space-y-4">
-      <el-avatar :size="158" :src="teacherInfo?.dxp_user_avatar"></el-avatar>
-      <div>{{ teacherInfo?.dxp_user_realname }}</div>
-      <!-- <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"> -->
-      <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
-        v-show="audioChatStatus === CHAT_STATUS.WAITING_OTHERS_ACCEPT">
-        <div>正在等待对方接受邀请</div>
-        <i:line-md:loading-alt-loop class="text-xl" />
-      </div>
-      <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
-        v-show="audioChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT">
-        <div>邀请你语音通话...</div>
-      </div>
-      <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
-        v-show="audioChatStatus === CHAT_STATUS.WAITING_BUSY">
-        <div class="text-red-500">对方忙线中请等待</div>
-        <div class="text-red-500">当前排队:{{ 4 }}</div>
-        <i:line-md:loading-alt-loop class="text-xl" />
-      </div>
-      <div class="text-hex-909090 flex_center flex-col space-y-2 h-16" v-show="audioChatStatus === CHAT_STATUS.CHATING">
-        <div>正在通话中</div>
-        <div>{{ '00:30' }}</div>
-      </div>
-      <!-- </div> -->
-
-      <div class="pt-16 text-xl flex justify-around w-full">
-        <div v-show="audioChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT"
-          class="bg-green-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
-          @click="handleAudioChatAccept">
-          <i:ic:baseline-phone />
-        </div>
-        <div v-show="audioChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT"
-          class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
-          @click="handleAudioChatDeny">
-          <i:mdi:phone-hangup />
-        </div>
-        <div v-show="audioChatStatus === CHAT_STATUS.CHATING"
-          class="bg-hex-efefef text-hex-272636 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
-          @click="">
-          <i:ant-design:audio-outlined v-show="true" />
-          <i:ant-design:audio-muted-outlined v-show="false" />
-        </div>
-        <div v-show="audioChatStatus === CHAT_STATUS.CHATING"
-          class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
-          @click="handleAudioChatEnd">
-          <i:ic:outline-close></i:ic:outline-close>
-        </div>
-        <div
-          v-show="audioChatStatus === CHAT_STATUS.WAITING_OTHERS_ACCEPT || audioChatStatus === CHAT_STATUS.WAITING_BUSY"
-          class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
-          @click="handleAudioChatCancel">
-          <i:ic:outline-close></i:ic:outline-close>
-        </div>
-
-      </div>
-    </div>
-  </chat-dialog>
+  <rtc-dialog ref="RtcDialogRef" @update-info="emitUpdateInfo"></rtc-dialog>
 
-  <chat-dialog ref="ChatVideoRef">
+  <!-- <chat-dialog ref="ChatVideoRef">
     <i:ant-design:audio-outlined />
     <i:ant-design:audio-muted-outlined />
 
     <i:material-symbols:video-camera-back-rounded />
     <i:material-symbols:video-camera-front-off-rounded />
-  </chat-dialog>
+  </chat-dialog> -->
 </template>
 
 <style scoped lang="scss">

+ 13 - 8
src/pages/teacher/consult.vue

@@ -225,30 +225,35 @@ const ws2 = createSocket(
   { teacher: user.user_id, student: '*' },
   {
     message(socketRes: TSocketRes<type_dyaw_xlfw_zxhd_log & { operate: CHAT_OPERATION }>) {
+      console.log('enter', socketRes);
       if (socketRes.from_client_name.endsWith('student')) {
         // infoList.push(socketRes.content)
         if (socketRes.content.dxzl_tea_user_id === user.user_id) {
-          console.log('operate:', socketRes.content.operate);
-          RtcDialogRef!.publisher(ws2, socketRes.content.operate)
+          RtcDialogRef!.publisher(socketRes.content)
         }
       }
     }
   }
 )
+onMounted(() => {
+  RtcDialogRef!.init(ws2)
+})
 
 
 
 async function handleAudioChatStart() {
-  RtcDialogRef!.open(dyaw_xlfw_zxhd,'audio')
+  RtcDialogRef!.open(dyaw_xlfw_zxhd, 'audio')
 }
 
-
-
-
 async function handleVideoChatStart() {
-
+  RtcDialogRef!.open(dyaw_xlfw_zxhd, 'video')
 }
 
+// Handler for the rtc-dialog 'update-info' emit: appends the pushed call-log
+// entry to the visible chat list when it belongs to the open conversation.
+function emitUpdateInfo(info: type_dyaw_xlfw_zxhd_log) {
+  console.log('info :>> ', info);
+  // Optional chaining: dyaw_xlfw_zxhd may not be set before a chat is selected.
+  if (info.dxz_id === dyaw_xlfw_zxhd?.dxz_id)
+    infoList.push(info)
+}
 </script>
 
 <template>
@@ -315,7 +320,7 @@ async function handleVideoChatStart() {
 
 
 
-  <rtc-dialog ref="RtcDialogRef"></rtc-dialog>
+  <rtc-dialog ref="RtcDialogRef" @update-info="emitUpdateInfo"></rtc-dialog>
 
 </template>