index.vue 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475
  1. <script setup lang="ts">
  2. import AgoraRTC from "agora-rtc-sdk-ng"
  3. import type { IAgoraRTCClient, IMicrophoneAudioTrack, ICameraVideoTrack } from "agora-rtc-sdk-ng"
  4. import { CHAT_STATUS, CHAT_OPERATION } from '~/types';
  5. import { UseDraggable } from '@vueuse/components'
  6. import request from '~/utils/request';
  7. import type { type_dyaw_xlfw_zxhd, type_dyaw_xlfw_zxhd_log } from '~/types';
  8. import user, { UserRole } from '~/store/user';
  9. import { socketSend } from '~/utils/ws';
  10. import { formatOffsetSec } from '~/utils/time';
// Elapsed-call-time bookkeeping: `offsetTime` ticks once per second while a
// call is in progress (driven by `offsetTimer`, started on ACCEPT).
let offsetTimer: NodeJS.Timeout
let offsetTime: number

// Notifies the parent chat view of created/updated call-log records.
// `isUpdate` = true means "edit an existing record", otherwise "append".
const emits = defineEmits<{
(event: 'update-info', info: Partial<type_dyaw_xlfw_zxhd_log>, isUpdate?: boolean): void;
}>()

// Base payload reused by every xlfw_zxhd_log add/edit request of the call.
let reqDate: Partial<type_dyaw_xlfw_zxhd_log>
// Current consultation session (set by open() or an incoming START message).
let dyaw_xlfw_zxhd = $ref<type_dyaw_xlfw_zxhd>()
// Call mode of the current session.
let mode = $ref<'audio' | 'video'>()
// Id of the current xlfw_zxhd_log record (set after add / incoming START).
let dxzl_id = $ref<string>()

// The peer's identity as seen from the local role: a student sees the teacher
// and vice versa. Undefined until the session is set (or for other roles).
const otherInfo = $computed(() => {
if (UserRole === 'student')
return { id: dyaw_xlfw_zxhd?.dxz_tea_user_id, realname: dyaw_xlfw_zxhd?.dxz_tea_user_realname, avatar: dyaw_xlfw_zxhd?.dxx_tea_avatar }
if (UserRole === 'teacher')
return { id: dyaw_xlfw_zxhd?.dxz_stu_user_id, realname: dyaw_xlfw_zxhd?.dxz_stu_user_realname, avatar: dyaw_xlfw_zxhd?.dxx_user_avatar }
})

// Whether the floating call window is visible.
let isOpen = $ref<boolean>(false)
// Current phase of the call state machine rendered by the template.
let currentChatStatus = $ref<CHAT_STATUS>(CHAT_STATUS.WAITING_OTHERS_ACCEPT)
// Signalling channel, injected by the parent via the exposed init().
let ws2: WebSocket;

// Hides the call window (does not tear down RTC resources).
function handleClose() {
isOpen = false
}
  32. defineExpose({
  33. init(ws: WebSocket) {
  34. ws2 = ws
  35. },
  36. open(d: type_dyaw_xlfw_zxhd, _mode: 'audio' | 'video') {
  37. if (isOpen) return;
  38. // isOpen = true
  39. dyaw_xlfw_zxhd = d
  40. reqDate = {
  41. dxz_id: dyaw_xlfw_zxhd.dxz_id,
  42. dxzl_stu_user_id: dyaw_xlfw_zxhd.dxz_stu_user_id,
  43. dxzl_stu_user_realname: dyaw_xlfw_zxhd.dxz_stu_user_realname,
  44. dxzl_tea_user_id: dyaw_xlfw_zxhd.dxz_tea_user_id,
  45. dxzl_tea_user_realname: dyaw_xlfw_zxhd.dxz_tea_user_realname,
  46. }
  47. mode = _mode
  48. handleAudioChatStart()
  49. },
  50. close() {
  51. handleClose()
  52. },
  53. async publisher(
  54. content:
  55. // type_dyaw_xlfw_zxhd_log &
  56. {
  57. operate: CHAT_OPERATION
  58. // rtcOptions?: TRtcOptions
  59. mode?: 'audio' | 'video'
  60. dyaw_xlfw_zxhd?: type_dyaw_xlfw_zxhd
  61. fullSendData?: type_dyaw_xlfw_zxhd_log
  62. dxzl_id?: string
  63. }
  64. ) {
  65. console.log('publisher: ', content);
  66. //
  67. const { operate } = content
  68. switch (operate) {
  69. case CHAT_OPERATION.START:
  70. currentChatStatus = CHAT_STATUS.WAITING_YOU_ACCEPT
  71. // rtcOptions = content.rtcOptions!;
  72. mode = content.mode!;
  73. dyaw_xlfw_zxhd = content.dyaw_xlfw_zxhd!
  74. dxzl_id = content.dxzl_id!
  75. reqDate = {
  76. dxz_id: dyaw_xlfw_zxhd.dxz_id,
  77. dxzl_stu_user_id: dyaw_xlfw_zxhd.dxz_stu_user_id,
  78. dxzl_stu_user_realname: dyaw_xlfw_zxhd.dxz_stu_user_realname,
  79. dxzl_tea_user_id: dyaw_xlfw_zxhd.dxz_tea_user_id,
  80. dxzl_tea_user_realname: dyaw_xlfw_zxhd.dxz_tea_user_realname,
  81. }
  82. isOpen = true
  83. emits('update-info', content.fullSendData!)
  84. break;
  85. case CHAT_OPERATION.CANCEL:
  86. emits('update-info', content.fullSendData!, true)
  87. isOpen = false
  88. break;
  89. case CHAT_OPERATION.ACCEPT:
  90. await rtcInstance.client!.publish(rtcInstance.localAudioTrack!);
  91. if (mode === 'video')
  92. await rtcInstance.client!.publish(rtcInstance.localVideoTrack!);
  93. currentChatStatus = CHAT_STATUS.CHATING
  94. offsetTimer = setInterval(() => {
  95. offsetTime = (offsetTime as number + 1)
  96. }, 1000)
  97. break;
  98. case CHAT_OPERATION.DENY:
  99. emits('update-info', content.fullSendData!, true)
  100. isOpen = false
  101. await rtcInstance.client?.leave();
  102. break;
  103. case CHAT_OPERATION.END:
  104. emits('update-info', content.fullSendData!, true)
  105. offsetTimer && clearInterval(offsetTimer)
  106. isOpen = false
  107. await rtcInstance.client!.leave();
  108. break;
  109. default:
  110. break;
  111. }
  112. }
  113. })
  114. function handleInfoAdd(tip: string) {
  115. const _reqDate = Object.assign({
  116. dxzl_status: '1',
  117. dxzl_type: mode === 'audio' ? '3' : '4',
  118. dxzl_last_msg_content: encodeURIComponent(`【${mode === 'audio' ? '语音通话' : '视频通话'}】`),
  119. }, reqDate)
  120. return request({
  121. url: '/dyaw/xlfw_zxhd_log/add',
  122. data: {
  123. dyaw_xlfw_zxhd_log: _reqDate
  124. }
  125. }).then(res => {
  126. if (res.code === '1') {
  127. const fullSendData = {
  128. create_user_id: user.user_id,
  129. create_dateline: Date.now().toString().slice(0, 10),
  130. ..._reqDate,
  131. dxzl_id: `${res.data.insert_id}`
  132. } as type_dyaw_xlfw_zxhd_log
  133. emits('update-info', fullSendData)
  134. dxzl_id = `${res.data.insert_id}`
  135. return fullSendData
  136. }
  137. return Promise.reject()
  138. })
  139. }
  140. function handleInfoEdit(data: Partial<type_dyaw_xlfw_zxhd_log>, tip?: string) {
  141. const _reqDate = Object.assign({
  142. dxzl_last_msg_content: tip ? encodeURIComponent(`【${mode === 'audio' ? '语音通话' : '视频通话'}】 ${tip}`) : undefined,
  143. }, data)
  144. return request({
  145. url: '/dyaw/xlfw_zxhd_log/edit',
  146. data: {
  147. dxzl_id: dxzl_id,
  148. dyaw_xlfw_zxhd_log: _reqDate
  149. }
  150. }).then(res => {
  151. if (res.code === '1') {
  152. emits('update-info', {
  153. dxzl_id: dxzl_id!,
  154. ..._reqDate
  155. }, true)
  156. return {
  157. dxzl_id: dxzl_id!,
  158. ..._reqDate
  159. }
  160. }
  161. return Promise.reject()
  162. })
  163. }
// Mount targets for the local preview / remote feed (template refs).
const LocalPlayerContainerRef = $ref<HTMLElement>()
const RemotePlayerContainerRef = $ref<HTMLElement>()
// ==========
// chat audio/video
// ==========
// Lazily-created Agora RTC handles; populated during call setup and reused
// across calls (the client is created only once, see initRtcClient()).
let rtcInstance: {
client?: IAgoraRTCClient;
localAudioTrack?: IMicrophoneAudioTrack;
localVideoTrack?: ICameraVideoTrack
} = {
client: undefined,
localAudioTrack: undefined,
localVideoTrack: undefined,
}
// Join credentials fetched from the backend — see getRtcOption().
type TRtcOptions = {
appId: string;
channel: string;
token: string;
}
let rtcOptions: TRtcOptions;
  184. function initRtcClient() {
  185. if (rtcInstance.client) return;
  186. const client = rtcInstance.client = AgoraRTC.createClient({ mode: "rtc", codec: "vp8" });
  187. client.on("user-published", async (user, mediaType) => {
  188. await client.subscribe(user, mediaType);
  189. if (mediaType === "audio") {
  190. const audioTrack = user.audioTrack;
  191. audioTrack?.play();
  192. } else {
  193. const videoTrack = user.videoTrack;
  194. videoTrack?.play(RemotePlayerContainerRef as HTMLElement);
  195. }
  196. });
  197. }
  198. async function getRtcOption() {
  199. try {
  200. await request({
  201. url: '/dyaw/xlfw_zxhd/get_rtc_token',
  202. data: {
  203. dxz_id: dyaw_xlfw_zxhd?.dxz_id
  204. }
  205. }).then(async res => {
  206. if (res.code === '1') {
  207. let resp: { jgim_roomid: string; rtc_appid: string; rtc_token: string } = res.data.one_info
  208. return rtcOptions = {
  209. appId: resp.rtc_appid,
  210. // channel: resp.jgim_roomid,
  211. channel: dyaw_xlfw_zxhd!.dxz_id,
  212. token: resp.rtc_token,
  213. // uid: user.user_id
  214. }
  215. }
  216. })
  217. } catch (error) {
  218. console.error(error);
  219. }
  220. }
  221. async function handleAudioChatStart() {
  222. try {
  223. isOpen = true
  224. currentChatStatus = CHAT_STATUS.WAITING_OTHERS_ACCEPT
  225. let isBusy;
  226. await request({
  227. url: '/dyaw/xlfw_zxhd/get_user_status',
  228. data: {
  229. user_id: otherInfo?.id
  230. }
  231. }).then(res => {
  232. if (res.code === '1') {
  233. isBusy = !!res.data.status
  234. }
  235. })
  236. if (isBusy) {
  237. // busy operation
  238. currentChatStatus = CHAT_STATUS.WAITING_BUSY
  239. setTimeout(() => {
  240. handleClose()
  241. }, 2000)
  242. return
  243. };
  244. await AgoraRTC.getMicrophones()
  245. if (mode === 'video')
  246. await AgoraRTC.getCameras()
  247. initRtcClient()
  248. await getRtcOption()
  249. const fullSendData = await handleInfoAdd('拨号中')
  250. await nextTick(async () => {
  251. await rtcInstance.client!.join(rtcOptions.appId, rtcOptions.channel, rtcOptions?.token, /*rtcOptions.uid*/ user.user_id);
  252. rtcInstance.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack();
  253. // await rtcInstance.client!.publish(rtcInstance.localAudioTrack);
  254. if (mode === 'video') {
  255. rtcInstance.localVideoTrack = await AgoraRTC.createCameraVideoTrack();
  256. rtcInstance.localVideoTrack.play(LocalPlayerContainerRef as HTMLElement);
  257. // await rtcInstance.client!.publish(rtcInstance.localVideoTrack);
  258. }
  259. socketSend(ws2, {
  260. dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
  261. dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
  262. operate: CHAT_OPERATION.START,
  263. mode,
  264. // rtcOptions,
  265. dyaw_xlfw_zxhd,
  266. fullSendData,
  267. dxzl_id: fullSendData?.dxzl_id
  268. })
  269. })
  270. } catch (error) {
  271. console.error(error);
  272. handleClose()
  273. }
  274. }
  275. async function handleAudioChatCancel() {
  276. const fullSendData = await handleInfoEdit({ dxzl_status: '2' }, '已取消')
  277. socketSend(ws2, {
  278. dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
  279. dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
  280. operate: CHAT_OPERATION.CANCEL,
  281. fullSendData
  282. })
  283. isOpen = false
  284. rtcInstance.client?.leave();
  285. }
  286. async function handleAudioChatAccept() {
  287. currentChatStatus = CHAT_STATUS.WAITING_ACCEPT
  288. const fullSendData = await handleInfoEdit({ dxzl_status: '3' }, '通话中')
  289. // ...
  290. await getRtcOption()
  291. initRtcClient()
  292. await rtcInstance.client!.join(rtcOptions.appId, rtcOptions.channel, rtcOptions?.token, /*rtcOptions.uid*/ user.user_id);
  293. rtcInstance.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack();
  294. await rtcInstance.client!.publish(rtcInstance.localAudioTrack);
  295. if (mode === 'video') {
  296. rtcInstance.localVideoTrack = await AgoraRTC.createCameraVideoTrack();
  297. rtcInstance.localVideoTrack.play(LocalPlayerContainerRef as HTMLElement);
  298. await rtcInstance.client!.publish(rtcInstance.localVideoTrack);
  299. }
  300. //
  301. socketSend(ws2, {
  302. dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
  303. dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
  304. operate: CHAT_OPERATION.ACCEPT,
  305. fullSendData
  306. })
  307. currentChatStatus = CHAT_STATUS.CHATING
  308. offsetTimer = setInterval(() => {
  309. offsetTime = (offsetTime as number + 1)
  310. }, 1000)
  311. }
  312. async function handleAudioChatDeny() {
  313. const fullSendData = await handleInfoEdit({ dxzl_status: '4' }, '已拒接')
  314. socketSend(ws2, {
  315. dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
  316. dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
  317. operate: CHAT_OPERATION.DENY,
  318. fullSendData
  319. })
  320. isOpen = false
  321. }
  322. async function handleAudioChatEnd() {
  323. const fullSendData = await handleInfoEdit({ dxzl_status: '4' }, '已结束')
  324. offsetTimer && clearInterval(offsetTimer)
  325. await rtcInstance.client!.leave();
  326. socketSend(ws2, {
  327. dxzl_stu_user_id: dyaw_xlfw_zxhd!.dxz_stu_user_id,
  328. dxzl_tea_user_id: dyaw_xlfw_zxhd!.dxz_tea_user_id,
  329. operate: CHAT_OPERATION.END,
  330. fullSendData
  331. })
  332. isOpen = false
  333. }
  334. let audioInUse = $ref(true)
  335. let videoInUse = $ref(true)
  336. function handleSwitchAudio() {
  337. audioInUse = !audioInUse
  338. try {
  339. rtcInstance.localAudioTrack!.setEnabled(audioInUse)
  340. } catch (error) {
  341. console.error(error);
  342. }
  343. }
  344. function handleSwitchVideo() {
  345. videoInUse = !videoInUse
  346. try {
  347. rtcInstance.localVideoTrack!.setEnabled(videoInUse)
  348. } catch (error) {
  349. console.error(error);
  350. }
  351. }
  352. </script>
<template>
  <!-- Floating, draggable call window; position persisted per browser tab. -->
  <UseDraggable v-if="isOpen" storage-key="chat-audio" storage-type="session" :initial-value="{ x: 584, y: 207 }"
    class="fixed w-375px h-670px bg-hex-191919 cursor-move z-4000">
    <!-- Title bar: minimize icon (no handler wired) and close. -->
    <div class="w-full flex justify-end items-center p-2 text-light-50 h-36px">
      <i:clarity:window-min-line class="cursor-pointer" />
      <i:ic:outline-close class="cursor-pointer" @click="handleClose" />
    </div>
    <div class="h-634px relative">
      <slot>
        <!-- Video surfaces: small local preview above the full-size remote feed. -->
        <div v-show="mode === 'video' && currentChatStatus === CHAT_STATUS.CHATING" ref="LocalPlayerContainerRef"
          class="absolute z-6 w-144px h-174px top-80px right-10px border border-dark-50"></div>
        <div v-show="mode === 'video' && currentChatStatus === CHAT_STATUS.CHATING" ref="RemotePlayerContainerRef"
          class="absolute z-1 inset-0"></div>
        <!-- Call controls; which buttons show is driven by the call state machine. -->
        <div class="absolute z-11 left-0 bottom-66px text-xl flex justify-around w-full text-light-50 ">
          <!-- Accept / deny an incoming call. -->
          <div v-show="currentChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT"
            class="bg-green-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleAudioChatAccept">
            <i:ic:baseline-phone />
          </div>
          <div v-show="currentChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT"
            class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleAudioChatDeny">
            <i:mdi:phone-hangup />
          </div>
          <!-- Mute toggle and hang-up during an active call. -->
          <div v-show="currentChatStatus === CHAT_STATUS.CHATING"
            class="bg-hex-efefef text-hex-272636 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleSwitchAudio">
            <i:ant-design:audio-outlined v-show="audioInUse" />
            <i:ant-design:audio-muted-outlined v-show="!audioInUse" />
          </div>
          <div v-show="currentChatStatus === CHAT_STATUS.CHATING"
            class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleAudioChatEnd">
            <i:ic:outline-close></i:ic:outline-close>
          </div>
          <!-- Cancel while still dialling (or while showing the busy notice). -->
          <div
            v-show="currentChatStatus === CHAT_STATUS.WAITING_OTHERS_ACCEPT || currentChatStatus === CHAT_STATUS.WAITING_BUSY"
            class="bg-red-600 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleAudioChatCancel">
            <i:ic:outline-close></i:ic:outline-close>
          </div>
          <!-- Camera toggle, video calls only. -->
          <div v-show="mode === 'video' && currentChatStatus === CHAT_STATUS.CHATING"
            class="bg-hex-efefef text-hex-272636 w-12 h-12 rounded-1 cursor-pointer flex items-center justify-around"
            @click="handleSwitchVideo">
            <i:material-symbols:video-camera-back-rounded v-show="videoInUse" />
            <i:material-symbols:video-camera-front-off-rounded v-show="!videoInUse" />
          </div>
        </div>
        <!-- -->
        <!-- Peer identity plus per-state status text. -->
        <div class="pt-24 flex_center flex-col text-light-50 space-y-4">
          <!-- <div>{{ CHAT_STATUS[currentChatStatus] }}</div> -->
          <el-avatar v-show="mode === 'audio' || (mode === 'video' && currentChatStatus !== CHAT_STATUS.CHATING)"
            :size="158" :src="otherInfo?.avatar"></el-avatar>
          <div v-show="mode === 'audio' || (mode === 'video' && currentChatStatus !== CHAT_STATUS.CHATING)">{{
            otherInfo?.realname
          }}</div>
          <!-- <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"> -->
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
            v-show="currentChatStatus === CHAT_STATUS.WAITING_OTHERS_ACCEPT">
            <div>正在等待对方接受邀请</div>
            <i:line-md:loading-alt-loop class="text-xl" />
          </div>
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
            v-show="currentChatStatus === CHAT_STATUS.WAITING_YOU_ACCEPT">
            <div>邀请你语音通话...</div>
          </div>
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
            v-show="currentChatStatus === CHAT_STATUS.WAITING_BUSY">
            <div class="text-red-500">对方忙线中</div>
            <div class="text-red-500">请稍后再试</div>
            <i:line-md:loading-alt-loop class="text-xl" />
          </div>
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
            v-show="currentChatStatus === CHAT_STATUS.WAITING_ACCEPT">
            <div>接通中...</div>
          </div>
          <!-- Elapsed-time readout for audio calls (video shows the feed instead). -->
          <div class="text-hex-909090 flex_center flex-col space-y-2 h-16"
            v-show="mode === 'audio' && currentChatStatus === CHAT_STATUS.CHATING">
            <div>正在通话中</div>
            <div>{{ formatOffsetSec(offsetTime) }}</div>
          </div>
          <!-- </div> -->
        </div>
      </slot>
    </div>
  </UseDraggable>
</template>