|
@@ -0,0 +1,474 @@
|
|
|
+<script setup lang="ts">
|
|
|
+import AgoraRTC from "agora-rtc-sdk-ng"
|
|
|
+import type { IAgoraRTCClient, IMicrophoneAudioTrack, ICameraVideoTrack } from "agora-rtc-sdk-ng"
|
|
|
+import { CHAT_STATUS, CHAT_OPERATION } from '~/types';
|
|
|
+import { UseDraggable } from '@vueuse/components'
|
|
|
+import request from '~/utils/request';
|
|
|
+import type { type_dyaw_xlfw_zxhd, type_dyaw_xlfw_zxhd_log } from '~/types';
|
|
|
+import user, { UserRole } from '~/store/user';
|
|
|
+import { socketSend } from '~/utils/ws';
|
|
|
+import { formatOffsetSec } from '~/utils/time';
|
|
|
+
|
|
|
+let offsetTimer: NodeJS.Timeout
|
|
|
+let offsetTime: number
|
|
|
+
|
|
|
+const emits = defineEmits<{
|
|
|
+ (event: 'update-info', info: Partial<type_dyaw_xlfw_zxhd_log>, isUpdate?: boolean): void;
|
|
|
+}>()
|
|
|
+
|
|
|
+let reqDate: Partial<type_dyaw_xlfw_zxhd_log>
|
|
|
+
|
|
|
+let dyaw_xlfw_zxhd = $ref<type_dyaw_xlfw_zxhd>()
|
|
|
+let mode = $ref<'audio' | 'video'>()
|
|
|
+let dxzl_id = $ref<string>()
|
|
|
+
|
|
|
+const otherInfo = $computed(() => {
|
|
|
+ if (UserRole === 'student')
|
|
|
+ return { id: dyaw_xlfw_zxhd?.dxz_tea_user_id, realname: dyaw_xlfw_zxhd?.dxz_tea_user_realname, avatar: dyaw_xlfw_zxhd?.dxx_tea_avatar }
|
|
|
+ if (UserRole === 'teacher')
|
|
|
+ return { id: dyaw_xlfw_zxhd?.dxz_stu_user_id, realname: dyaw_xlfw_zxhd?.dxz_stu_user_realname, avatar: dyaw_xlfw_zxhd?.dxx_user_avatar }
|
|
|
+})
|
|
|
+
|
|
|
+let isOpen = $ref<boolean>(false)
|
|
|
+let currentChatStatus = $ref<CHAT_STATUS>(CHAT_STATUS.WAITING_OTHERS_ACCEPT)
|
|
|
+
|
|
|
+let ws2: WebSocket;
|
|
|
// Hide the call window. Shared by the local close button and by
// remote-initiated teardown paths; does not touch the RTC client.
function handleClose() {
  isOpen = false
}
|
|
|
+defineExpose({
|
|
|
+ init(ws: WebSocket) {
|
|
|
+ ws2 = ws
|
|
|
+ },
|
|
|
+ open(d: type_dyaw_xlfw_zxhd, _mode: 'audio' | 'video') {
|
|
|
+ if (isOpen) return;
|
|
|
+ // isOpen = true
|
|
|
+ dyaw_xlfw_zxhd = d
|
|
|
+ reqDate = {
|
|
|
+ dxz_id: dyaw_xlfw_zxhd.dxz_id,
|
|
|
+ dxzl_stu_user_id: dyaw_xlfw_zxhd.dxz_stu_user_id,
|
|
|
+ dxzl_stu_user_realname: dyaw_xlfw_zxhd.dxz_stu_user_realname,
|
|
|
+ dxzl_tea_user_id: dyaw_xlfw_zxhd.dxz_tea_user_id,
|
|
|
+ dxzl_tea_user_realname: dyaw_xlfw_zxhd.dxz_tea_user_realname,
|
|
|
+ }
|
|
|
+
|
|
|
+ mode = _mode
|
|
|
+ handleAudioChatStart()
|
|
|
+ },
|
|
|
+ close() {
|
|
|
+ handleClose()
|
|
|
+ },
|
|
|
+ async publisher(
|
|
|
+ content:
|
|
|
+ // type_dyaw_xlfw_zxhd_log &
|
|
|
+ {
|
|
|
+ operate: CHAT_OPERATION
|
|
|
+ // rtcOptions?: TRtcOptions
|
|
|
+ mode?: 'audio' | 'video'
|
|
|
+ dyaw_xlfw_zxhd?: type_dyaw_xlfw_zxhd
|
|
|
+ fullSendData?: type_dyaw_xlfw_zxhd_log
|
|
|
+ dxzl_id?: string
|
|
|
+ }
|
|
|
+ ) {
|
|
|
+ console.log('publisher: ', content);
|
|
|
+ //
|
|
|
+ const { operate } = content
|
|
|
+ switch (operate) {
|
|
|
+ case CHAT_OPERATION.START:
|
|
|
+ currentChatStatus = CHAT_STATUS.WAITING_YOU_ACCEPT
|
|
|
+ // rtcOptions = content.rtcOptions!;
|
|
|
+ mode = content.mode!;
|
|
|
+ dyaw_xlfw_zxhd = content.dyaw_xlfw_zxhd!
|
|
|
+ dxzl_id = content.dxzl_id!
|
|
|
+ reqDate = {
|
|
|
+ dxz_id: dyaw_xlfw_zxhd.dxz_id,
|
|
|
+ dxzl_stu_user_id: dyaw_xlfw_zxhd.dxz_stu_user_id,
|
|
|
+ dxzl_stu_user_realname: dyaw_xlfw_zxhd.dxz_stu_user_realname,
|
|
|
+ dxzl_tea_user_id: dyaw_xlfw_zxhd.dxz_tea_user_id,
|
|
|
+ dxzl_tea_user_realname: dyaw_xlfw_zxhd.dxz_tea_user_realname,
|
|
|
+ }
|
|
|
+ isOpen = true
|
|
|
+ emits('update-info', content.fullSendData!)
|
|
|
+
|
|
|
+ break;
|
|
|
+ case CHAT_OPERATION.CANCEL:
|
|
|
+ emits('update-info', content.fullSendData!, true)
|
|
|
+ isOpen = false
|
|
|
+ break;
|
|
|
+ case CHAT_OPERATION.ACCEPT:
|
|
|
+ await rtcInstance.client!.publish(rtcInstance.localAudioTrack!);
|
|
|
+ if (mode === 'video')
|
|
|
+ await rtcInstance.client!.publish(rtcInstance.localVideoTrack!);
|
|
|
+ currentChatStatus = CHAT_STATUS.CHATING
|
|
|
+ offsetTimer = setInterval(() => {
|
|
|
+ offsetTime = (offsetTime as number + 1)
|
|
|
+ }, 1000)
|
|
|
+ break;
|
|
|
+ case CHAT_OPERATION.DENY:
|
|
|
+ emits('update-info', content.fullSendData!, true)
|
|
|
+ isOpen = false
|
|
|
+ await rtcInstance.client?.leave();
|
|
|
+ break;
|
|
|
+ case CHAT_OPERATION.END:
|
|
|
+ emits('update-info', content.fullSendData!, true)
|
|
|
+ offsetTimer && clearInterval(offsetTimer)
|
|
|
+ isOpen = false
|
|
|
+ await rtcInstance.client!.leave();
|
|
|
+ break;
|
|
|
+ default:
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ }
|
|
|
+})
|
|
|
+
|
|
|
+function handleInfoAdd(tip: string) {
|
|
|
+ const _reqDate = Object.assign({
|
|
|
+ dxzl_status: '1',
|
|
|
+ dxzl_type: mode === 'audio' ? '3' : '4',
|
|
|
+ dxzl_last_msg_content: encodeURIComponent(`【${mode === 'audio' ? '语音通话' : '视频通话'}】`),
|
|
|
+ }, reqDate)
|
|
|
+ return request({
|
|
|
+ url: '/dyaw/xlfw_zxhd_log/add',
|
|
|
+ data: {
|
|
|
+ dyaw_xlfw_zxhd_log: _reqDate
|
|
|
+ }
|
|
|
+ }).then(res => {
|
|
|
+ if (res.code === '1') {
|
|
|
+ const fullSendData = {
|
|
|
+ create_user_id: user.user_id,
|
|
|
+ create_dateline: Date.now().toString().slice(0, 10),
|
|
|
+ ..._reqDate,
|
|
|
+ dxzl_id: `${res.data.insert_id}`
|
|
|
+ } as type_dyaw_xlfw_zxhd_log
|
|
|
+ emits('update-info', fullSendData)
|
|
|
+ dxzl_id = `${res.data.insert_id}`
|
|
|
+ return fullSendData
|
|
|
+ }
|
|
|
+ return Promise.reject()
|
|
|
+ })
|
|
|
+}
|
|
|
+
|
|
|
+function handleInfoEdit(data: Partial<type_dyaw_xlfw_zxhd_log>, tip?: string) {
|
|
|
+ const _reqDate = Object.assign({
|
|
|
+ dxzl_last_msg_content: tip ? encodeURIComponent(`【${mode === 'audio' ? '语音通话' : '视频通话'}】 ${tip}`) : undefined,
|
|
|
+ }, data)
|
|
|
+ return request({
|
|
|
+ url: '/dyaw/xlfw_zxhd_log/edit',
|
|
|
+ data: {
|
|
|
+ dxzl_id: dxzl_id,
|
|
|
+ dyaw_xlfw_zxhd_log: _reqDate
|
|
|
+ }
|
|
|
+ }).then(res => {
|
|
|
+ if (res.code === '1') {
|
|
|
+ emits('update-info', {
|
|
|
+ dxzl_id: dxzl_id!,
|
|
|
+ ..._reqDate
|
|
|
+ }, true)
|
|
|
+ return {
|
|
|
+ dxzl_id: dxzl_id!,
|
|
|
+ ..._reqDate
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return Promise.reject()
|
|
|
+ })
|
|
|
+}
|
|
|
+
|
|
|
// Template refs that act as mount points for the local and remote video feeds.
const LocalPlayerContainerRef = $ref<HTMLElement>()
const RemotePlayerContainerRef = $ref<HTMLElement>()
// ==========
// chat audio/video
// ==========
// Lazily-created Agora client plus the local tracks we publish.
let rtcInstance: {
  client?: IAgoraRTCClient;
  localAudioTrack?: IMicrophoneAudioTrack;
  localVideoTrack?: ICameraVideoTrack
} = {
  client: undefined,
  localAudioTrack: undefined,
  localVideoTrack: undefined,
}
// Channel-join credentials fetched from the backend (see getRtcOption).
type TRtcOptions = {
  appId: string;
  channel: string;
  token: string;
}
// Unset until getRtcOption() succeeds; join() depends on it.
let rtcOptions: TRtcOptions;
|
|
|
+
|
|
|
+function initRtcClient() {
|
|
|
+ if (rtcInstance.client) return;
|
|
|
+ const client = rtcInstance.client = AgoraRTC.createClient({ mode: "rtc", codec: "vp8" });
|
|
|
+
|
|
|
+ client.on("user-published", async (user, mediaType) => {
|
|
|
+ await client.subscribe(user, mediaType);
|
|
|
+ if (mediaType === "audio") {
|
|
|
+ const audioTrack = user.audioTrack;
|
|
|
+ audioTrack?.play();
|
|
|
+ } else {
|
|
|
+ const videoTrack = user.videoTrack;
|
|
|
+ videoTrack?.play(RemotePlayerContainerRef as HTMLElement);
|
|
|
+ }
|
|
|
+ });
|
|
|
+}
|
|
|
+
|
|
|
+async function getRtcOption() {
|
|
|
+ try {
|
|
|
+ await request({
|
|
|
+ url: '/dyaw/xlfw_zxhd/get_rtc_token',
|
|
|
+ data: {
|
|
|
+ dxz_id: dyaw_xlfw_zxhd?.dxz_id
|
|
|
+ }
|
|
|
+ }).then(async res => {
|
|
|
+ if (res.code === '1') {
|
|
|
+ let resp: { jgim_roomid: string; rtc_appid: string; rtc_token: string } = res.data.one_info
|
|
|
+ return rtcOptions = {
|
|
|
+ appId: resp.rtc_appid,
|
|
|
+ // channel: resp.jgim_roomid,
|
|
|
+ channel: dyaw_xlfw_zxhd!.dxz_id,
|
|
|
+ token: resp.rtc_token,
|
|
|
+ // uid: user.user_id
|
|
|
+ }
|
|
|
+ }
|
|
|
+ })
|
|
|
+ } catch (error) {
|
|
|
+ console.error(error);
|
|
|
+ }
|
|
|
+}
|
|
|
+
|
|
|
+async function handleAudioChatStart() {
|
|
|
+ try {
|
|
|
+ isOpen = true
|
|
|
+ currentChatStatus = CHAT_STATUS.WAITING_OTHERS_ACCEPT
|
|
|
+ let isBusy;
|
|
|
+ await request({
|
|
|
+ url: '/dyaw/xlfw_zxhd/get_user_status',
|
|
|
+ data: {
|
|
|
+ user_id: otherInfo?.id
|
|
|
+ }
|
|
|
+ }).then(res => {
|
|
|
+ if (res.code === '1') {
|
|
|
+ isBusy = !!res.data.status
|
|
|
+ }
|
|
|
+ })
|
|
|
+ if (isBusy) {
|
|
|
+ // busy operation
|
|
|
+ currentChatStatus = CHAT_STATUS.WAITING_BUSY
|
|
|
+ setTimeout(() => {
|
|
|
+ handleClose()
|
|
|
+ }, 2000)
|
|
|
+ return
|
|
|
+ };
|
|
|
+ await AgoraRTC.getMicrophones()
|
|
|
+ if (mode === 'video')
|
|
|
+ await AgoraRTC.getCameras()
|
|
|
+ initRtcClient()
|
|
|
+
|
|
|
+ await getRtcOption()
|
|
|
+
|
|
|
+ const fullSendData = await handleInfoAdd('拨号中')
|
|
|
+
|
|
|
+
|
|
|
+ await nextTick(async () => {
|
|
|
+ await rtcInstance.client!.join(rtcOptions.appId, rtcOptions.channel, rtcOptions?.token, /*rtcOptions.uid*/ user.user_id);
|
|
|
+ rtcInstance.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack();
|
|
|
+ // await rtcInstance.client!.publish(rtcInstance.localAudioTrack);
|
|
|
+ if (mode === 'video') {
|
|
|
+ rtcInstance.localVideoTrack = await AgoraRTC.createCameraVideoTrack();
|
|
|
+ rtcInstance.localVideoTrack.play(LocalPlayerContainerRef as HTMLElement);
|
|
|
+ // await rtcInstance.client!.publish(rtcInstance.localVideoTrack);
|
|
|
+ }
|
|
|
+
|
|
|
+ socketSend(ws2, {
|
|
|
+ dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
|
|
|
+ dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
|
|
|
+ operate: CHAT_OPERATION.START,
|
|
|
+ mode,
|
|
|
+ // rtcOptions,
|
|
|
+ dyaw_xlfw_zxhd,
|
|
|
+ fullSendData,
|
|
|
+ dxzl_id: fullSendData?.dxzl_id
|
|
|
+ })
|
|
|
+ })
|
|
|
+
|
|
|
+ } catch (error) {
|
|
|
+ console.error(error);
|
|
|
+ handleClose()
|
|
|
+ }
|
|
|
+
|
|
|
+}
|
|
|
+async function handleAudioChatCancel() {
|
|
|
+ const fullSendData = await handleInfoEdit({ dxzl_status: '2' }, '已取消')
|
|
|
+ socketSend(ws2, {
|
|
|
+ dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
|
|
|
+ dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
|
|
|
+ operate: CHAT_OPERATION.CANCEL,
|
|
|
+ fullSendData
|
|
|
+ })
|
|
|
+ isOpen = false
|
|
|
+ rtcInstance.client?.leave();
|
|
|
+
|
|
|
+}
|
|
|
+async function handleAudioChatAccept() {
|
|
|
+ currentChatStatus = CHAT_STATUS.WAITING_ACCEPT
|
|
|
+
|
|
|
+
|
|
|
+ const fullSendData = await handleInfoEdit({ dxzl_status: '3' }, '通话中')
|
|
|
+ // ...
|
|
|
+ await getRtcOption()
|
|
|
+ initRtcClient()
|
|
|
+ await rtcInstance.client!.join(rtcOptions.appId, rtcOptions.channel, rtcOptions?.token, /*rtcOptions.uid*/ user.user_id);
|
|
|
+ rtcInstance.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack();
|
|
|
+ await rtcInstance.client!.publish(rtcInstance.localAudioTrack);
|
|
|
+ if (mode === 'video') {
|
|
|
+ rtcInstance.localVideoTrack = await AgoraRTC.createCameraVideoTrack();
|
|
|
+ rtcInstance.localVideoTrack.play(LocalPlayerContainerRef as HTMLElement);
|
|
|
+ await rtcInstance.client!.publish(rtcInstance.localVideoTrack);
|
|
|
+ }
|
|
|
+ //
|
|
|
+ socketSend(ws2, {
|
|
|
+ dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
|
|
|
+ dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
|
|
|
+ operate: CHAT_OPERATION.ACCEPT,
|
|
|
+ fullSendData
|
|
|
+ })
|
|
|
+ currentChatStatus = CHAT_STATUS.CHATING
|
|
|
+ offsetTimer = setInterval(() => {
|
|
|
+ offsetTime = (offsetTime as number + 1)
|
|
|
+ }, 1000)
|
|
|
+}
|
|
|
+async function handleAudioChatDeny() {
|
|
|
+ const fullSendData = await handleInfoEdit({ dxzl_status: '4' }, '已拒接')
|
|
|
+ socketSend(ws2, {
|
|
|
+ dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
|
|
|
+ dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
|
|
|
+ operate: CHAT_OPERATION.DENY,
|
|
|
+ fullSendData
|
|
|
+ })
|
|
|
+ isOpen = false
|
|
|
+}
|
|
|
+async function handleAudioChatEnd() {
|
|
|
+ const fullSendData = await handleInfoEdit({ dxzl_status: '4' }, '已结束')
|
|
|
+ offsetTimer && clearInterval(offsetTimer)
|
|
|
+ await rtcInstance.client!.leave();
|
|
|
+ socketSend(ws2, {
|
|
|
+ dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
|
|
|
+ dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
|
|
|
+ operate: CHAT_OPERATION.END,
|
|
|
+ fullSendData
|
|
|
+ })
|
|
|
+ isOpen = false
|
|
|
+}
|
|
|
+
|
|
|
// Local mute toggles; true = the corresponding local track is enabled.
let audioInUse = $ref(true)
let videoInUse = $ref(true)
|
|
|
+
|
|
|
+function handleSwitchAudio() {
|
|
|
+ audioInUse = !audioInUse
|
|
|
+ try {
|
|
|
+ rtcInstance.localAudioTrack!.setEnabled(audioInUse)
|
|
|
+ } catch (error) {
|
|
|
+ console.error(error);
|
|
|
+ }
|
|
|
+
|
|
|
+}
|
|
|
+function handleSwitchVideo() {
|
|
|
+ videoInUse = !videoInUse
|
|
|
+ try {
|
|
|
+ rtcInstance.localVideoTrack!.setEnabled(videoInUse)
|
|
|
+ } catch (error) {
|
|
|
+ console.error(error);
|
|
|
+ }
|
|
|
+}
|
|
|
+
|
|
|
+</script>
|
|
|
+
|
|
|
<template>
  <!-- Draggable floating call window; position persisted per browser session. -->
  <UseDraggable v-if="isOpen" storage-key="chat-audio" storage-type="session" :initial-value="{ x: 584, y: 207 }"
    class="fixed w-375px h-670px bg-hex-191919 cursor-move z-4000">
    <!-- Title bar: minimize icon (decorative — no handler) and close. -->
    <div class="w-full flex justify-end items-center p-2 text-light-50 h-36px">
      <i:clarity:window-min-line class="cursor-pointer" />
      <i:ic:outline-close class="cursor-pointer" @click="handleClose" />
    </div>
    <div class="h-634px relative">
      <slot>

        <!-- Video surfaces: small local preview overlaid on the full-size remote feed. -->
        <div v-show="mode === 'video' && currentChatStatus === CHAT_STATUS.CHATING" ref="LocalPlayerContainerRef"
          class="absolute z-6 w-144px h-174px top-80px right-10px border border-dark-50"></div>
        <div v-show="mode === 'video' && currentChatStatus === CHAT_STATUS.CHATING" ref="RemotePlayerContainerRef"
          class="absolute z-1 inset-0"></div>

        <!-- Call-control buttons; which are visible depends on the call state. -->
        <div class="absolute z-11 left-0 bottom-66px text-xl flex justify-around w-full text-light-50 ">
          <!-- Accept an incoming call. -->
          <div v-show="currentChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT"
            class="bg-green-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleAudioChatAccept">
            <i:ic:baseline-phone />
          </div>
          <!-- Reject an incoming call. -->
          <div v-show="currentChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT"
            class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleAudioChatDeny">
            <i:mdi:phone-hangup />
          </div>
          <!-- Mute/unmute the microphone during a call. -->
          <div v-show="currentChatStatus === CHAT_STATUS.CHATING"
            class="bg-hex-efefef text-hex-272636 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleSwitchAudio">
            <i:ant-design:audio-outlined v-show="audioInUse" />
            <i:ant-design:audio-muted-outlined v-show="!audioInUse" />
          </div>
          <!-- Hang up an active call. -->
          <div v-show="currentChatStatus === CHAT_STATUS.CHATING"
            class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleAudioChatEnd">
            <i:ic:outline-close></i:ic:outline-close>
          </div>
          <!-- Cancel an outgoing call that is still ringing (or got a busy signal). -->
          <div
            v-show="currentChatStatus === CHAT_STATUS.WAITING_OTHERS_ACCEPT || currentChatStatus === CHAT_STATUS.WAITING_BUSY"
            class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleAudioChatCancel">
            <i:ic:outline-close></i:ic:outline-close>
          </div>
          <!-- Enable/disable the camera during a video call. -->
          <div v-show="mode === 'video' && currentChatStatus === CHAT_STATUS.CHATING"
            class="bg-hex-efefef text-hex-272636 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleSwitchVideo">
            <i:material-symbols:video-camera-back-rounded v-show="videoInUse" />
            <i:material-symbols:video-camera-front-off-rounded v-show="!videoInUse" />
          </div>
        </div>
        <!-- -->
        <!-- Peer identity plus one status panel per call state. -->
        <div class="pt-24 flex_center flex-col text-light-50 space-y-4">
          <!-- <div>{{ CHAT_STATUS[currentChatStatus] }}</div> -->
          <el-avatar v-show="mode === 'audio' || (mode === 'video' && currentChatStatus !== CHAT_STATUS.CHATING)"
            :size="158" :src="otherInfo?.avatar"></el-avatar>
          <div v-show="mode === 'audio' || (mode === 'video' && currentChatStatus !== CHAT_STATUS.CHATING)">{{
            otherInfo?.realname
          }}</div>
          <!-- <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"> -->
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
            v-show="currentChatStatus === CHAT_STATUS.WAITING_OTHERS_ACCEPT">
            <div>正在等待对方接受邀请</div>
            <i:line-md:loading-alt-loop class="text-xl" />
          </div>
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
            v-show="currentChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT">
            <div>邀请你语音通话...</div>
          </div>
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
            v-show="currentChatStatus === CHAT_STATUS.WAITING_BUSY">
            <div class="text-red-500">对方忙线中</div>
            <div class="text-red-500">请稍后再试</div>
            <i:line-md:loading-alt-loop class="text-xl" />
          </div>
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
            v-show="currentChatStatus === CHAT_STATUS.WAITING_ACCEPT">
            <div>接通中...</div>
          </div>
          <!-- Elapsed-time readout for audio calls (video mode shows the feed instead). -->
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
            v-show="mode === 'audio' && currentChatStatus === CHAT_STATUS.CHATING">
            <div>正在通话中</div>
            <div>{{ formatOffsetSec(offsetTime) }}</div>
          </div>
          <!-- </div> -->
        </div>
      </slot>
    </div>
  </UseDraggable>
</template>
|
|
|
+
|