From 19a87a3ecbeb53d5c6e90d2c3a09b0bca6eb9575 Mon Sep 17 00:00:00 2001
From: fzk <458813868@qq.com>
Date: Fri, 14 Jun 2024 10:03:51 +0800
Subject: [PATCH] fix: screen recording bug
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../recorderScreen/ScreenRecorder.tsx | 43 +++++++++++--------
 1 file changed, 24 insertions(+), 19 deletions(-)

diff --git a/packages/web/src/components/recorderScreen/ScreenRecorder.tsx b/packages/web/src/components/recorderScreen/ScreenRecorder.tsx
index 0defed5..23b384b 100644
--- a/packages/web/src/components/recorderScreen/ScreenRecorder.tsx
+++ b/packages/web/src/components/recorderScreen/ScreenRecorder.tsx
@@ -1,9 +1,7 @@
 import { ExclamationCircleFilled } from '@ant-design/icons';
 import Timer from '@pear-rec/timer';
 import useTimer from '@pear-rec/timer/src/useTimer';
-import { mp4StreamToOPFSFile } from '@webav/av-cliper';
-import { AVRecorder } from '@webav/av-recorder';
-import { Button, Modal } from 'antd';
+import { Modal } from 'antd';
 import { saveAs } from 'file-saver';
 import { useEffect, useRef, useState } from 'react';
 import { useTranslation } from 'react-i18next';
@@ -29,8 +27,8 @@ const ScreenRecorder = (props) => {
 	const mediaStream = useRef(); // video and system audio stream
 	const micStream = useRef(); // microphone audio stream
 	const combinedStream = useRef(); // combined stream
-	const mediaRecorder = useRef(); // media recorder object
-	const outputStream = useRef();
+	const mediaRecorder = useRef(); // media recorder object
+	const recordedChunks = useRef([]);
 	const [isRecording, setIsRecording] = useState(false); // flag: is recording in progress
 	const [isSave, setIsSave] = useState(false);
 	const [percent, setPercent] = useState(0);
@@ -60,14 +58,6 @@ const ScreenRecorder = (props) => {
 		};
 	}, []);
 
-	useEffect(() => {
-		(async () => {
-			if (outputStream.current == null) return;
-			const opfsFile = await mp4StreamToOPFSFile(outputStream.current);
-			type == 'gif' ? transcodeGif(opfsFile) : saveFile(opfsFile);
-		})();
-	}, [outputStream.current]);
-
 	async function getCurrentUser() {
 		try {
 			let user = await db.users.where({ userType: 1 }).first();
@@ -212,9 +202,25 @@ const ScreenRecorder = (props) => {
 		} else {
 			combinedStream.current = mediaStream.current;
 		}
-		const recodeMS = combinedStream.current.clone();
-		const size = window.isElectron ? await window.electronAPI?.invokeRsGetBoundsClip() : props.size;
-		mediaRecorder.current = new AVRecorder(recodeMS, { width: size.width, height: size.height });
+		mediaRecorder.current = new MediaRecorder(combinedStream.current);
+		mediaRecorder.current.addEventListener('dataavailable', (e) => {
+			if (e.data.size > 0) {
+				recordedChunks.current.push(e.data);
+			}
+		});
+		mediaRecorder.current.addEventListener('stop', () => {
+			exportRecording();
+		});
+	}
+
+	// Export the recorded video file
+	function exportRecording() {
+		if (recordedChunks.current.length > 0) {
+			const blob = new Blob(recordedChunks.current, {
+				type: 'video/webm',
+			});
+			type == 'gif' ? transcodeGif(blob) : saveFile(blob);
+		}
 	}
 
 	async function handleShotScreen() {
@@ -257,8 +263,6 @@ const ScreenRecorder = (props) => {
 	async function handleStartRecord() {
 		await setMediaRecorder();
 		await mediaRecorder.current.start();
-		outputStream.current = mediaRecorder.current.outputStream;
-
 		setIsRecording(true);
 		props.setIsRecording && props.setIsRecording(true);
 		timer.start();
@@ -287,6 +291,7 @@ const ScreenRecorder = (props) => {
 		worker.postMessage({
 			status: 'stop',
 		});
+		recordedChunks.current = [];
 	}
 
 	async function loadFfmpeg() {
@@ -330,7 +335,7 @@ const ScreenRecorder = (props) => {
 	}
 
 	async function saveFile(blob) {
-		const fileName = `pear-rec_${+new Date()}.${type == 'gif' ? 'gif' : 'mp4'}`;
+		const fileName = `pear-rec_${+new Date()}.${type == 'gif' ? 'gif' : 'webm'}`;
 		if (window.isElectron) {
 			const url = URL.createObjectURL(blob);
 			window.electronAPI.sendRsDownloadVideo({ url, fileName: fileName });