<script lang="ts" setup name="audioDialog">
import { nextTick, onUnmounted, ref } from 'vue'
import { Stopwatch, VideoPause, VideoPlay } from '@element-plus/icons-vue'
import { ElMessage, ElMessageBox } from 'element-plus'
import dayjs from 'dayjs'
import { fileUpload, voiceAdd } from '@/api/ycjg/aqbb'
import { log } from '@/utils/log'

const emits = defineEmits(['closeRefresh'])

// 对话框类型 — dialog mode; only 'create' is used today.
const dialogStatus = ref('create')
const dialogVisible = ref(false)

// 显示标题 — dialog title per mode.
const textMap: { [key: string]: string } = {
  create: '广播中',
}

// Row passed by the caller. NOTE(review): usage below reads `data.id` and
// `data.camera[].url` — confirm this shape against the parent component.
let data: any = {}
// Seconds accumulated from earlier recording segments (i.e. before a pause).
// Becomes a "ss.ff" string after talkStop copies elapsedTime into it.
let pause: number | string = 0
// Epoch milliseconds when the current recording segment started.
let startTime: number | null = null
// The very first "开始录制" must start() the MediaRecorder; later ones resume().
let isFirstPlay = true

const isRecording = ref(false)
// Elapsed recording time shown in the dialog; starts as the number 0 and
// becomes a fixed "ss.ff" string once the stopwatch ticks.
const elapsedTime = ref<number | string>(0)
// Hikvision JSPlugin player instance, created lazily in initDialog.
const player = ref<any>(null)
const audioChunks = ref<Blob[]>([])
const mediaRecorder = ref<MediaRecorder | null>(null)
let timer: ReturnType<typeof setInterval> | null = null

// 秒表 — stopwatch tick: previous segments (pause) + time since segment start,
// truncated to two decimals.
const updateTime = () => {
  if (isRecording.value && startTime !== null) {
    const now = new Date().getTime()
    const diff = (now - startTime) / 1000
    elapsedTime.value = (Number(pause) + Math.floor(diff * 100) / 100).toFixed(2)
  }
}

/**
 * Open the dialog and (re)initialize the Hikvision player, the audio
 * analysis graph and the local recorder. Exposed to the parent component.
 *
 * @param dialogstatus mode key into textMap (currently only 'create')
 * @param row caller-supplied row carrying the broadcast group + cameras
 */
function initDialog(dialogstatus: string, row: any) {
  dialogStatus.value = dialogstatus
  dialogVisible.value = true
  data = row
  audioChunks.value = []
  // Wait for the dialog DOM (the #player div and the canvas) to exist.
  nextTick(() => {
    player.value = new (window as any).JSPlugin({
      szId: 'player',
      szBasePath: './',
      iMaxSplit: 1,
      iCurrentSplit: 2,
      openDebug: true,
      iWidth: 0,
      iHeight: 0,
      oStyle: {
        borderSelect: '#FFCC00',
      },
    })
    initAudio()
    initRecording()
    elapsedTime.value = 0
    pause = 0
    isRecording.value = false
    isFirstPlay = true
    timer = setInterval(updateTime, 100)
  })
}

// 关闭并刷新 — hide the dialog, reset timing state and ask the parent to
// refresh its list.
function closeRefresh() {
  dialogVisible.value = false
  pause = 0
  startTime = null
  if (timer !== null) {
    clearInterval(timer)
    timer = null
  }
  player.value = null
  emits('closeRefresh')
}

const waveformCanvas = ref<HTMLCanvasElement | null>(null)
let audioContext: AudioContext | null = null
let mediaStream: MediaStream | null = null
let sourceNode: MediaStreamAudioSourceNode | null = null
let analyser: AnalyserNode | null = null
let waveformCanvasCtx: CanvasRenderingContext2D | null = null
let frameId: number | null = null

// Build the Web Audio analysis graph used to draw the live waveform.
function initAudio() {
  audioContext = new (window.AudioContext || (window as any).webkitAudioContext)()
  analyser = audioContext.createAnalyser()
  analyser.fftSize = 512
  waveformCanvasCtx = waveformCanvas.value!.getContext('2d')
}

// 海康开始对讲 — start two-way talk towards every camera of the group.
function talkStart() {
  player.value!.JS_SetConnectTimeOut(0, 1000)
  data.camera.forEach((item: any) => {
    try {
      player.value!.JS_StartTalk(item.url).then(
        () => {
          console.log('talkStart success')
        },
        (e: any) => {
          console.error(e)
        },
      )
    }
    catch (e) {}
  })
}

// 海康结束对讲 — pause the local recorder, remember the elapsed time as the
// new pause offset, and stop the camera talk session.
function talkStop() {
  if (mediaRecorder.value !== null) {
    mediaRecorder.value.pause() // 暂停
  }
  isRecording.value = false
  pause = elapsedTime.value
  player.value!.JS_StopTalk().then(
    () => {
      console.log('talkStop success')
    },
    (e: any) => {
      console.error(e)
    },
  )
}

// Start (first press) or resume (after a pause) recording + camera talk.
function startRecording() {
  if (isRecording.value) {
    ElMessage({
      message: '已在对讲录制中',
      type: 'warning',
    })
    return
  }
  waveformCanvasCtx!.clearRect(0, 0, waveformCanvas.value!.width, waveformCanvas.value!.height)
  if (isFirstPlay) {
    const time = dayjs().format('YYYY-MM-DD HH:mm:ss')
    log('开始播报', `开始播报时间:${time}`)
    mediaRecorder.value!.start() // 开始
    isFirstPlay = false
  }
  else {
    mediaRecorder.value!.resume() // 恢复
  }
  try {
    talkStart()
  }
  catch (e) {}
  startTime = new Date().getTime()
  isRecording.value = true
}

// 本地媒体 — grab the microphone, wire it to both the MediaRecorder (for the
// uploaded file) and the analyser (for the waveform display).
async function initRecording() {
  await navigator.mediaDevices.getUserMedia({ audio: true })
    .then((stream) => {
      // 录 — collect chunks; with no timeslice, data arrives on stop().
      mediaRecorder.value = new MediaRecorder(stream, { mimeType: 'audio/webm' })
      mediaRecorder.value.ondataavailable = (event) => {
        if (event.data && event.data.size > 0) {
          audioChunks.value.push(event.data)
        }
      }
      // 画 — feed the same stream into the analyser for drawing.
      mediaStream = stream
      sourceNode = audioContext!.createMediaStreamSource(mediaStream)
      sourceNode.connect(analyser!)
      updateWaveform()
    })
    .catch(error => console.error('Error capturing audio:', error))
}

// 结束录制 — stop everything, then ask whether to save the recording; on
// confirm, upload the blob and register the voice record on the server.
function endAll() {
  stopRecording()
  if (mediaRecorder.value !== null && mediaRecorder.value.state !== 'inactive') {
    mediaRecorder.value.stop()
  }
  ElMessageBox.prompt(
    '广播完成,是否保存录制音频? \n如需保存,请填写音频名称。',
    '提示',
    {
      confirmButtonText: '是',
      cancelButtonText: '否',
      type: 'info',
    },
  ).then(({ value }) => {
    if (value === '' || value === null) {
      ElMessage.warning('请填写音频名称')
      return
    }
    const blob = new Blob(audioChunks.value, { type: 'audio/webm' })
    // Wrap the chunks into a File for upload. NOTE(review): the filename says
    // .mp3 while the payload is audio/webm — kept as-is, the backend may rely
    // on the extension; confirm server-side handling.
    const fileObj = new File([blob], `${value}.mp3`, {
      lastModified: new Date().getTime(),
      type: 'audio/webm',
    })
    const fd = new FormData()
    fd.append('file', fileObj)
    fileUpload(fd).then((response) => {
      if (response.code === 200) {
        // 提交录制文件
        const params = {
          groupId: data.id,
          voiceName: value,
          voiceLong: elapsedTime.value,
          voiceUrl: window.localStorage.getItem('baseurl-safe') + '/static/' + response.data,
        }
        voiceAdd(params).then((res) => {
          if (res.code === 200) {
            ElMessage.success('提交录制成功')
            closeRefresh()
          }
        })
      }
    })
  }).catch(() => {
    // 选否 — user declined saving; just close and refresh.
    closeRefresh()
  })
}

// Tear down talk, timers, microphone, audio graph and the draw loop.
function stopRecording() {
  try {
    talkStop()
  }
  catch (e) {}
  if (timer !== null) {
    clearInterval(timer)
    timer = null
  }
  if (mediaStream) {
    mediaStream.getTracks().forEach(track => track.stop())
    mediaStream = null
  }
  if (sourceNode) {
    sourceNode.disconnect()
    sourceNode = null
  }
  // Close the AudioContext so repeated initDialog calls don't leak contexts.
  if (audioContext) {
    audioContext.close()
    audioContext = null
  }
  if (frameId !== null) {
    cancelAnimationFrame(frameId)
    frameId = null
  }
}

// Draw one waveform frame and schedule the next via requestAnimationFrame.
function updateWaveform() {
  // Guard: the loop may fire once after teardown released these.
  if (!analyser || !waveformCanvasCtx || !waveformCanvas.value)
    return
  const bufferLength = analyser.frequencyBinCount
  const dataArray = new Uint8Array(bufferLength)
  analyser.getByteTimeDomainData(dataArray)
  waveformCanvasCtx.clearRect(0, 0, waveformCanvas.value.width, waveformCanvas.value.height)
  const width = waveformCanvas.value.width
  waveformCanvasCtx.fillStyle = 'black'
  waveformCanvasCtx.beginPath()
  waveformCanvasCtx.moveTo(0, waveformCanvas.value.height / 2)
  if (isRecording.value) {
    // Live signal: plot the time-domain samples (128 = silence midline).
    for (let i = 0; i < bufferLength; i++) {
      const x = (i / bufferLength) * width
      const y = dataArray[i] / 128.0 * waveformCanvas.value.height / 2
      waveformCanvasCtx.lineTo(x, y + waveformCanvas.value.height / 2)
    }
  }
  else {
    // Paused: draw a flat filled band instead of the live signal.
    for (let i = 0; i < bufferLength; i++) {
      const x = (i / bufferLength) * width
      waveformCanvasCtx.lineTo(x, waveformCanvas.value.height)
    }
  }
  waveformCanvasCtx.lineTo(width, waveformCanvas.value.height / 2)
  waveformCanvasCtx.fill()
  frameId = requestAnimationFrame(updateWaveform)
}

// Close via the dialog's X button: release microphone/audio resources first
// so the mic indicator doesn't stay on, then hide the dialog.
function dialogClose() {
  stopRecording()
  if (mediaRecorder.value !== null && mediaRecorder.value.state !== 'inactive') {
    mediaRecorder.value.stop()
  }
  dialogVisible.value = false
}

onUnmounted(() => {
  stopRecording()
  startTime = null
})

defineExpose({ initDialog })
</script>

<template>
  <el-dialog
    v-model="dialogVisible"
    :title="textMap[dialogStatus]"
    width="900"
    :before-close="dialogClose"
    append-to-body
    :open-delay="0"
    :close-on-click-modal="false"
  >
    <div style="position: absolute;right: 80px;top:20px;font-size: 18px;letter-spacing: 1px">
      {{ elapsedTime }}
    </div>
    <canvas ref="waveformCanvas" style="margin-top: -150px;" />
    <el-button type="primary" :icon="VideoPlay" @click="startRecording">
      开始录制
    </el-button>
    <el-button type="info" plain :icon="VideoPause" @click="talkStop">
      暂停录制
    </el-button>
    <el-button type="info" :icon="Stopwatch" @click="endAll">
      结束录制
    </el-button>
    <!-- Hidden host element required by the Hikvision JSPlugin player. -->
    <div id="player" style="width: 0px;height: 0px" />
  </el-dialog>
</template>

<style>
canvas {
  width: 100%;
  height: 300px !important;
}
.bb-cover {
  background: black;
  height: 150px;
  width: calc(100% - 40px);
  position: absolute;
  top: 84px;
  left: 20px;
}
</style>