@@ -7,24 +7,20 @@ import request from '~/utils/request';
import type { type_dyaw_xlfw_zxhd, type_dyaw_xlfw_zxhd_log } from '~/types';
import user, { UserRole } from '~/store/user';
import { socketSend } from '~/utils/ws';
+import { formatOffsetSec } from '~/utils/time';
+import { useInterval } from '@vueuse/core'
+
+let offsetTime = $ref(formatOffsetSec(0))

const emits = defineEmits<{
-  (event: 'update-info', info: type_dyaw_xlfw_zxhd_log): void;
+  (event: 'update-info', info: Partial<type_dyaw_xlfw_zxhd_log>, isUpdate?: boolean): void;
}>()

let reqDate: Partial<type_dyaw_xlfw_zxhd_log>
-// = {
-//   dxz_id: dyaw_xlfw_zxhd.dxz_id,
-//   dxzl_stu_user_id: dyaw_xlfw_zxhd.dxz_stu_user_id,
-//   dxzl_stu_user_realname: dyaw_xlfw_zxhd.dxz_stu_user_realname,
-//   dxzl_tea_user_id: dyaw_xlfw_zxhd.dxz_tea_user_id,
-//   dxzl_tea_user_realname: dyaw_xlfw_zxhd.dxz_tea_user_realname,
-//   dxzl_last_msg_content: encodeURIComponent(inputValue),
-//   dxzl_type: inputValue.includes('><img') ? '2' : '1'
-// }

let dyaw_xlfw_zxhd = $ref<type_dyaw_xlfw_zxhd>()
let mode = $ref<'audio' | 'video'>()
+let dxzl_id = $ref<string>()

const otherInfo = $computed(() => {
  if (UserRole === 'student')
@@ -34,7 +30,7 @@ const otherInfo = $computed(() => {
})

let isOpen = $ref<boolean>(false)
-let audioChatStatus = $ref<CHAT_STATUS>(CHAT_STATUS.WAITING_OTHERS_ACCEPT)
+let currentChatStatus = $ref<CHAT_STATUS>(CHAT_STATUS.WAITING_OTHERS_ACCEPT)

let ws2: WebSocket;
function handleClose() {
@@ -62,26 +58,28 @@ defineExpose({
  close() {
    handleClose()
  },
-  publisher(
+  async publisher(
    content:
      // type_dyaw_xlfw_zxhd_log &
      {
-        operate: CHAT_OPERATION,
-        rtcOptions?: TRtcOptions,
-        mode?: 'audio' | 'video',
-        dyaw_xlfw_zxhd?: type_dyaw_xlfw_zxhd,
+        operate: CHAT_OPERATION
+        // rtcOptions?: TRtcOptions
+        mode?: 'audio' | 'video'
+        dyaw_xlfw_zxhd?: type_dyaw_xlfw_zxhd
        fullSendData?: type_dyaw_xlfw_zxhd_log
+        dxzl_id?: string
      }
  ) {
-    console.log('publisher');
+    console.log('publisher: ', content);
    //
    const { operate } = content
    switch (operate) {
      case CHAT_OPERATION.START:
-        audioChatStatus = CHAT_STATUS.WAITING_YOU_ACCEPT
-        rtcOptions = content.rtcOptions!;
+        currentChatStatus = CHAT_STATUS.WAITING_YOU_ACCEPT
+        // rtcOptions = content.rtcOptions!;
        mode = content.mode!;
        dyaw_xlfw_zxhd = content.dyaw_xlfw_zxhd!
+        dxzl_id = content.dxzl_id!
        reqDate = {
          dxz_id: dyaw_xlfw_zxhd.dxz_id,
          dxzl_stu_user_id: dyaw_xlfw_zxhd.dxz_stu_user_id,
@@ -94,19 +92,25 @@ defineExpose({

        break;
      case CHAT_OPERATION.CANCEL:
-        // emits('update-info', content.fullSendData!)
+        emits('update-info', content.fullSendData!, true)
        isOpen = false
        break;
      case CHAT_OPERATION.ACCEPT:
-        audioChatStatus = CHAT_STATUS.CHATING
+        await rtcInstance.client!.publish(rtcInstance.localAudioTrack!);
+        if (mode === 'video')
+          await rtcInstance.client!.publish(rtcInstance.localVideoTrack!);
+        currentChatStatus = CHAT_STATUS.CHATING
+        useInterval(1000, { callback: n => offsetTime = formatOffsetSec(n) })
        break;
      case CHAT_OPERATION.DENY:
-        // emits('update-info', content.fullSendData!)
+        emits('update-info', content.fullSendData!, true)
        isOpen = false
+        await rtcInstance.client?.leave();
        break;
      case CHAT_OPERATION.END:
-        // emits('update-info', content.fullSendData!)
+        emits('update-info', content.fullSendData!, true)
        isOpen = false
+        await rtcInstance.client!.leave();
        break;
      default:
        break;
@@ -116,6 +120,7 @@ defineExpose({

function handleInfoAdd(tip: string) {
  const _reqDate = Object.assign({
+    dxzl_status: '1',
    dxzl_type: mode === 'audio' ? '3' : '4',
    dxzl_last_msg_content: encodeURIComponent(`【${mode === 'audio' ? '语音通话' : '视频通话'}】`),
  }, reqDate)
@@ -133,21 +138,35 @@ function handleInfoAdd(tip: string) {
        dxzl_id: `${res.data.insert_id}`
      } as type_dyaw_xlfw_zxhd_log
      emits('update-info', fullSendData)
+      dxzl_id = `${res.data.insert_id}`
      return fullSendData
    }
+    return Promise.reject()
  })
}

-function handleInfoEdit(id: string, data: Partial<type_dyaw_xlfw_zxhd_log>, tip?: string) {
+function handleInfoEdit(data: Partial<type_dyaw_xlfw_zxhd_log>, tip?: string) {
  const _reqDate = Object.assign({
-    dxzl_last_msg_content: tip ?? encodeURIComponent(`【${mode === 'audio' ? '语音通话' : '视频通话'}】 ${tip}`),
+    dxzl_last_msg_content: tip ? encodeURIComponent(`【${mode === 'audio' ? '语音通话' : '视频通话'}】 ${tip}`) : undefined,
  }, data)
  return request({
    url: '/dyaw/xlfw_zxhd_log/edit',
    data: {
-      dxzl_id: id,
+      dxzl_id: dxzl_id,
      dyaw_xlfw_zxhd_log: _reqDate
    }
+  }).then(res => {
+    if (res.code === '1') {
+      emits('update-info', {
+        dxzl_id: dxzl_id!,
+        ..._reqDate
+      }, true)
+      return {
+        dxzl_id: dxzl_id!,
+        ..._reqDate
+      }
+    }
+    return Promise.reject()
  })
}

@@ -174,12 +193,10 @@ let rtcOptions: TRtcOptions;

function initRtcClient() {
  if (rtcInstance.client) return;
-  let client = rtcInstance.client = AgoraRTC.createClient({ mode: "rtc", codec: "vp8" });
+  const client = rtcInstance.client = AgoraRTC.createClient({ mode: "rtc", codec: "vp8" });

  client.on("user-published", async (user, mediaType) => {
-
    await client.subscribe(user, mediaType);
-
    if (mediaType === "audio") {
      const audioTrack = user.audioTrack;
      audioTrack?.play();
@@ -190,10 +207,34 @@ function initRtcClient() {
  });
}

+async function getRtcOption() {
+  try {
+    await request({
+      url: '/dyaw/xlfw_zxhd/get_rtc_token',
+      data: {
+        dxz_id: dyaw_xlfw_zxhd?.dxz_id
+      }
+    }).then(async res => {
+      if (res.code === '1') {
+        let resp: { jgim_roomid: string; rtc_appid: string; rtc_token: string } = res.data.one_info
+        return rtcOptions = {
+          appId: resp.rtc_appid,
+          // channel: resp.jgim_roomid,
+          channel: dyaw_xlfw_zxhd!.dxz_id,
+          token: resp.rtc_token,
+          // uid: user.user_id
+        }
+      }
+    })
+  } catch (error) {
+    console.error(error);
+  }
+}

async function handleAudioChatStart() {
  try {
-    audioChatStatus = CHAT_STATUS.WAITING_OTHERS_ACCEPT
+    isOpen = true
+    currentChatStatus = CHAT_STATUS.WAITING_OTHERS_ACCEPT
    let isBusy;
    await request({
      url: '/dyaw/xlfw_zxhd/get_user_status',
@@ -207,59 +248,52 @@ async function handleAudioChatStart() {
    })
    if (isBusy) {
      // busy operation
-      audioChatStatus = CHAT_STATUS.WAITING_BUSY
+      currentChatStatus = CHAT_STATUS.WAITING_BUSY
      setTimeout(() => {
        handleClose()
      }, 2000)
      return
    };
-    // await AgoraRTC.getMicrophones()
-    // await AgoraRTC.getCameras()
-    // await request({
-    //   url: '/dyaw/xlfw_zxhd/get_rtc_token',
-    //   data: {
-    //     dxz_id: dyaw_xlfw_zxhd?.dxz_id
-    //   }
-    // }).then(async res => {
-    //   if (res.code === '1') {
-    //     let resp: { jgim_roomid: string; rtc_appid: string; rtc_token: string } = res.data.one_info
-    //     rtcOptions = {
-    //       appId: resp.rtc_appid,
-    //       // channel: resp.jgim_roomid,
-    //       channel: dyaw_xlfw_zxhd!.dxz_id,
-    //       token: resp.rtc_token,
-    //       // uid: user.user_id
-    //     }
-
-    //     initRtcClient()
-
-    //     await rtcInstance.client!.join(rtcOptions.appId, rtcOptions.channel, rtcOptions?.token, /*rtcOptions.uid*/ user.user_id);
-    //     rtcInstance.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack();
-    //     await rtcInstance.client!.publish(rtcInstance.localAudioTrack);
-    //     // rtcInstance.localVideoTrack = await AgoraRTC.createCameraVideoTrack();
-    //     // rtcInstance.localVideoTrack.play(LocalPlayerContainerRef as HTMLElement);
-    //     // await rtcInstance.client!.publish(rtcInstance.localVideoTrack);
-    //   }
-    // })
+    await AgoraRTC.getMicrophones()
+    if (mode === 'video')
+      await AgoraRTC.getCameras()
+    initRtcClient()
+
+    await getRtcOption()
+
    const fullSendData = await handleInfoAdd('拨号中')

-    isOpen = true
-    socketSend(ws2, {
-      dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
-      dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
-      operate: CHAT_OPERATION.START,
-      mode,
-      rtcOptions,
-      dyaw_xlfw_zxhd,
-      dxzl_id: fullSendData?.dxzl_id
+
+    await nextTick(async () => {
+      await rtcInstance.client!.join(rtcOptions.appId, rtcOptions.channel, rtcOptions?.token, /*rtcOptions.uid*/ user.user_id);
+      rtcInstance.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack();
+      // await rtcInstance.client!.publish(rtcInstance.localAudioTrack);
+      if (mode === 'video') {
+        rtcInstance.localVideoTrack = await AgoraRTC.createCameraVideoTrack();
+        rtcInstance.localVideoTrack.play(LocalPlayerContainerRef as HTMLElement);
+        // await rtcInstance.client!.publish(rtcInstance.localVideoTrack);
+      }
+
+      socketSend(ws2, {
+        dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
+        dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
+        operate: CHAT_OPERATION.START,
+        mode,
+        // rtcOptions,
+        dyaw_xlfw_zxhd,
+        fullSendData,
+        dxzl_id: fullSendData?.dxzl_id
+      })
    })
+
  } catch (error) {
    console.error(error);
+    handleClose()
  }

}
async function handleAudioChatCancel() {
-  const fullSendData = await handleInfoEdit('已取消')
+  const fullSendData = await handleInfoEdit({ dxzl_status: '2' }, '已取消')
  socketSend(ws2, {
    dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
    dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
@@ -267,17 +301,37 @@ async function handleAudioChatCancel() {
    fullSendData
  })
  isOpen = false
+  rtcInstance.client?.leave();
+
}
-function handleAudioChatAccept() {
+async function handleAudioChatAccept() {
+  currentChatStatus = CHAT_STATUS.WAITING_ACCEPT
+
+
+  const fullSendData = await handleInfoEdit({ dxzl_status: '3' }, '通话中')
+  // ...
+  await getRtcOption()
+  initRtcClient()
+  await rtcInstance.client!.join(rtcOptions.appId, rtcOptions.channel, rtcOptions?.token, /*rtcOptions.uid*/ user.user_id);
+  rtcInstance.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack();
+  await rtcInstance.client!.publish(rtcInstance.localAudioTrack);
+  if (mode === 'video') {
+    rtcInstance.localVideoTrack = await AgoraRTC.createCameraVideoTrack();
+    rtcInstance.localVideoTrack.play(LocalPlayerContainerRef as HTMLElement);
+    await rtcInstance.client!.publish(rtcInstance.localVideoTrack);
+  }
+  //
  socketSend(ws2, {
    dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
    dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
-    operate: CHAT_OPERATION.ACCEPT
+    operate: CHAT_OPERATION.ACCEPT,
+    fullSendData
  })
-  audioChatStatus = CHAT_STATUS.CHATING
+  currentChatStatus = CHAT_STATUS.CHATING
+  useInterval(1000, { callback: n => offsetTime = formatOffsetSec(n) })
}
async function handleAudioChatDeny() {
-  const fullSendData = await handleInfoEdit('已拒接')
+  const fullSendData = await handleInfoEdit({ dxzl_status: '4' }, '已拒接')
  socketSend(ws2, {
    dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
    dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
@@ -287,7 +341,8 @@ async function handleAudioChatDeny() {
  isOpen = false
}
async function handleAudioChatEnd() {
-  const fullSendData = await handleInfoEdit('已结束')
+  const fullSendData = await handleInfoEdit({ dxzl_status: '4' }, '已结束')
+  await rtcInstance.client!.leave();
  socketSend(ws2, {
    dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
    dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
@@ -297,8 +352,26 @@ async function handleAudioChatEnd() {
  isOpen = false
}

+let audioInUse = $ref(true)
+let videoInUse = $ref(true)

+function handleSwitchAudio() {
+  audioInUse = !audioInUse
+  try {
+    rtcInstance.localAudioTrack!.setEnabled(audioInUse)
+  } catch (error) {
+    console.error(error);
+  }

+}
+function handleSwitchVideo() {
+  videoInUse = !videoInUse
+  try {
+    rtcInstance.localVideoTrack!.setEnabled(videoInUse)
+  } catch (error) {
+    console.error(error);
+  }
+}
</script>

<template>
@@ -308,67 +381,84 @@ async function handleAudioChatEnd() {
      <i:clarity:window-min-line class="cursor-pointer" />
      <i:ic:outline-close class="cursor-pointer" @click="handleClose" />
    </div>
-    <div class="h-634px">
+    <div class="h-634px relative">
      <slot>
-        <div class="h-full flex_center flex-col text-light-50 space-y-4">
-          <el-avatar :size="158" :src="otherInfo?.avatar"></el-avatar>
-          <div>{{ otherInfo?.realname }}</div>
+
+        <div v-show="mode === 'video' && currentChatStatus === CHAT_STATUS.CHATING" ref="LocalPlayerContainerRef"
+          class="absolute z-6 w-144px h-174px top-80px right-10px border border-dark-50"></div>
+        <div v-show="mode === 'video' && currentChatStatus === CHAT_STATUS.CHATING" ref="RemotePlayerContainerRef"
+          class="absolute z-1 inset-0"></div>
+
+
+        <div class="absolute z-11 left-0 bottom-66px text-xl flex justify-around w-full text-light-50 ">
+          <div v-show="currentChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT"
+            class="bg-green-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
+            @click="handleAudioChatAccept">
+            <i:ic:baseline-phone />
+          </div>
+          <div v-show="currentChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT"
+            class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
+            @click="handleAudioChatDeny">
+            <i:mdi:phone-hangup />
+          </div>
+          <div v-show="currentChatStatus === CHAT_STATUS.CHATING"
+            class="bg-hex-efefef text-hex-272636 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
+            @click="handleSwitchAudio">
+            <i:ant-design:audio-outlined v-show="audioInUse" />
+            <i:ant-design:audio-muted-outlined v-show="!audioInUse" />
+          </div>
+          <div v-show="currentChatStatus === CHAT_STATUS.CHATING"
+            class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
+            @click="handleAudioChatEnd">
+            <i:ic:outline-close></i:ic:outline-close>
+          </div>
+          <div
+            v-show="currentChatStatus === CHAT_STATUS.WAITING_OTHERS_ACCEPT || currentChatStatus === CHAT_STATUS.WAITING_BUSY"
+            class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
+            @click="handleAudioChatCancel">
+            <i:ic:outline-close></i:ic:outline-close>
+          </div>
+          <div v-show="mode === 'video' && currentChatStatus === CHAT_STATUS.CHATING"
+            class="bg-hex-efefef text-hex-272636 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
+            @click="handleSwitchVideo">
+            <i:material-symbols:video-camera-back-rounded v-show="videoInUse" />
+            <i:material-symbols:video-camera-front-off-rounded v-show="!videoInUse" />
+          </div>
+        </div>
+        <!-- -->
+        <div class="pt-24 flex_center flex-col text-light-50 space-y-4">
+          <!-- <div>{{ CHAT_STATUS[currentChatStatus] }}</div> -->
+          <el-avatar v-show="mode === 'audio' || (mode === 'video' && currentChatStatus !== CHAT_STATUS.CHATING)"
+            :size="158" :src="otherInfo?.avatar"></el-avatar>
+          <div v-show="mode === 'audio' || (mode === 'video' && currentChatStatus !== CHAT_STATUS.CHATING)">{{
+            otherInfo?.realname
+          }}</div>
          <!-- <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"> -->
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
-            v-show="audioChatStatus === CHAT_STATUS.WAITING_OTHERS_ACCEPT">
+            v-show="currentChatStatus === CHAT_STATUS.WAITING_OTHERS_ACCEPT">
            <div>正在等待对方接受邀请</div>
            <i:line-md:loading-alt-loop class="text-xl" />
          </div>
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
-            v-show="audioChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT">
+            v-show="currentChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT">
            <div>邀请你语音通话...</div>
          </div>
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
-            v-show="audioChatStatus === CHAT_STATUS.WAITING_BUSY">
+            v-show="currentChatStatus === CHAT_STATUS.WAITING_BUSY">
            <div class="text-red-500">对方忙线中</div>
            <div class="text-red-500">请稍后再试</div>
            <i:line-md:loading-alt-loop class="text-xl" />
          </div>
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
-            v-show="audioChatStatus === CHAT_STATUS.CHATING">
+            v-show="currentChatStatus === CHAT_STATUS.WAITING_ACCEPT">
+            <div>接通中...</div>
+          </div>
+          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
+            v-show="mode === 'audio' && currentChatStatus === CHAT_STATUS.CHATING">
            <div>正在通话中</div>
-            <div>{{ '00:30' }}</div>
+            <div>{{ offsetTime }}</div>
          </div>
          <!-- </div> -->
-          <!-- -->
-          <div ref="LocalPlayerContainerRef"></div>
-          <div ref="RemotePlayerContainerRef"></div>
-          <!-- -->
-          <div class="pt-16 text-xl flex justify-around w-full">
-            <div v-show="audioChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT"
-              class="bg-green-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
-              @click="handleAudioChatAccept">
-              <i:ic:baseline-phone />
-            </div>
-            <div v-show="audioChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT"
-              class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
-              @click="handleAudioChatDeny">
-              <i:mdi:phone-hangup />
-            </div>
-            <div v-show="audioChatStatus === CHAT_STATUS.CHATING"
-              class="bg-hex-efefef text-hex-272636 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
-              @click="">
-              <i:ant-design:audio-outlined v-show="true" />
-              <i:ant-design:audio-muted-outlined v-show="false" />
-            </div>
-            <div v-show="audioChatStatus === CHAT_STATUS.CHATING"
-              class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
-              @click="handleAudioChatEnd">
-              <i:ic:outline-close></i:ic:outline-close>
-            </div>
-            <div
-              v-show="audioChatStatus === CHAT_STATUS.WAITING_OTHERS_ACCEPT || audioChatStatus === CHAT_STATUS.WAITING_BUSY"
-              class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
-              @click="handleAudioChatCancel">
-              <i:ic:outline-close></i:ic:outline-close>
-            </div>
-
-          </div>
        </div>
      </slot>
    </div>