How it works
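
Whichever library you pick, the flow is the same: obtain a MediaStream (getUserMedia for the microphone or camera, getDisplayMedia for the screen), feed it to a recorder, and collect the encoded result as a Blob. Below is a minimal sketch of that flow using only the native MediaRecorder API; recordrtc wraps this same idea and adds cross-browser recorder backends.

ts
// Minimal sketch: record the microphone for 5 seconds with the native MediaRecorder API.
async function recordMicrophoneForFiveSeconds(): Promise<Blob> {
  // 1. ask the browser for a MediaStream (use getDisplayMedia for the screen instead)
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });

  // 2. feed the stream to a recorder and collect the encoded chunks
  const recorder = new MediaRecorder(stream);
  const chunks: BlobPart[] = [];
  recorder.ondataavailable = (event) => chunks.push(event.data);
  const stopped = new Promise<void>((resolve) => (recorder.onstop = () => resolve()));

  recorder.start();
  await new Promise((resolve) => setTimeout(resolve, 5000));
  recorder.stop();
  await stopped;

  // 3. release the hardware and return the recording as a single Blob
  stream.getTracks().forEach((track) => track.stop());
  return new Blob(chunks, { type: recorder.mimeType });
}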

Live demo

The role of the libraries

There are actually quite a few libraries that implement this kind of recording. The one I'm currently using is recordrtc, because I find its feature set the most complete.

1. Install dependencies

sh
pnpm install -D @types/recordrtc
pnpm install recordrtc
pnpm install webrtc-adapter

2. Wrap the API

ts
import { RecordRTCPromisesHandler, type Options } from "recordrtc";

export abstract class BaseRecorder {
  stream?: MediaStream;
  rtcRecorder?: RecordRTCPromisesHandler;
  options?: Options;

  // must be implemented by subclasses: resolves the MediaStream to record
  abstract initStream(): Promise<MediaStream>;

  constructor(options?: Options) {
    const defaultOptions: Options = {
      disableLogs: true,
    };
    this.options = Object.assign(defaultOptions, options);
  }

  async initRecorder(options?: Options): Promise<void> {
    const stream = await this.initStream();
    this.stream = stream;
    // merge per-call options over the constructor defaults (type, recorderType, ...)
    this.rtcRecorder = new RecordRTCPromisesHandler(stream, {
      ...this.options,
      ...options,
    });
  }

  async start(options?: Options) {
    await this.initRecorder(options);
    return this.rtcRecorder?.startRecording();
  }

  async pause() {
    return this.rtcRecorder?.pauseRecording();
  }

  async resume() {
    return this.rtcRecorder?.resumeRecording();
  }

  async stop() {
    await this.rtcRecorder?.stopRecording();

    const blob = await this.rtcRecorder?.getBlob();

    this.stream?.getTracks().forEach((track) => track.stop());
    await this.rtcRecorder?.reset();
    await this.rtcRecorder?.destroy();

    return blob;
  }
}
ts
import { StereoAudioRecorder } from "recordrtc";
import { BaseRecorder } from "./BaseRecorder";

export class AudioRecorder extends BaseRecorder {
  constructor() {
    super({
      type: "audio",
      recorderType: StereoAudioRecorder,
    });
  }

  async initStream() {
    return await navigator.mediaDevices.getUserMedia({
      audio: true,
    });
  }
}

export const createAudioRecorder = () => new AudioRecorder();
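
For reference, here is a minimal sketch of the full lifecycle outside of Vue, including pause/resume, which the page examples below don't exercise. The download step assumes the WAV output of StereoAudioRecorder, and in a real page each call would be wired to its own button.

ts
import { createAudioRecorder } from "./AudioRecorder";

// sketch: record, pause, resume, stop, then offer the result as a download
async function recordAndDownload() {
  const recorder = createAudioRecorder();

  await recorder.start();  // requests the microphone and starts recording
  await recorder.pause();  // suspend without discarding what was captured
  await recorder.resume(); // continue recording

  const blob = await recorder.stop(); // releases the microphone and returns the Blob

  const url = URL.createObjectURL(blob!);
  const link = document.createElement("a");
  link.href = url;
  link.download = "recording.wav";
  link.click();
  URL.revokeObjectURL(url);
}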
ts
import { BaseRecorder } from "./BaseRecorder";
import { MediaStreamRecorder } from "recordrtc";

export abstract class VideoRecorderAbstract extends BaseRecorder {
  videoElement?: HTMLVideoElement;

  constructor() {
    super({
      // docs: https://recordrtc.org/
      type: "video",
      mimeType: "video/mp4",
      recorderType: MediaStreamRecorder,
    });
  }

  initVideoElement(videoEl: HTMLVideoElement) {
    if (!(videoEl instanceof HTMLVideoElement)) {
      throw new Error("[Camera] parameter must be an instance of HTMLVideoElement");
    }
    videoEl.autoplay = true;
    videoEl.muted = true;
    videoEl.playsInline = true;
    videoEl.style.objectFit = "cover";
    videoEl.onloadedmetadata = () => videoEl.play(); // autoplay
    videoEl.srcObject = this.stream!;
    this.videoElement = videoEl;
  }
}

export class CameraRecorder extends VideoRecorderAbstract {
  async initStream() {
    return navigator.mediaDevices.getUserMedia({
      audio: true,
      video: true,
    });
  }
}

export class ScreenRecorder extends VideoRecorderAbstract {
  async initStream() {
    return await navigator.mediaDevices.getDisplayMedia({
      audio: true,
      video: true,
    });
  }
}

export const createCameraRecorder = () => new CameraRecorder();
export const createScreenRecorder = () => new ScreenRecorder();
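
Since initStream is the only abstract method, capture constraints are easy to customize by subclassing. Here is a hypothetical sketch (the HdCameraRecorder name is illustrative, not part of the code above) that asks for a specific camera resolution and frame rate:

ts
import { VideoRecorderAbstract } from "./VideoRecorder";

// hypothetical subclass: same recording pipeline, stricter capture constraints
export class HdCameraRecorder extends VideoRecorderAbstract {
  async initStream() {
    return navigator.mediaDevices.getUserMedia({
      audio: true,
      video: {
        width: { ideal: 1920 },
        height: { ideal: 1080 },
        frameRate: { ideal: 30 },
      },
    });
  }
}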
ts
import { VideoRecorderAbstract } from "./VideoRecorder";

export interface TakePhotoOptions {
  imageType: "image/png" | "image/jpeg" | "image/webp";
  quality: number;
  autoPauseVideo: boolean;
}

export class Camera extends VideoRecorderAbstract {
  canvasElement?: HTMLCanvasElement;
  canvas2dContext: CanvasRenderingContext2D | null = null;

  // override
  async initStream() {
    return await navigator.mediaDevices.getUserMedia({
      audio: false,
      video: true,
    });
  }

  // override: skip creating a RecordRTCPromisesHandler instance, since taking photos doesn't need it
  async start() {
    this.initCanvasElement();
    this.stream = await this.initStream();
  }
  initCanvasElement() {
    const canvas = document.createElement("canvas");
    this.canvasElement = canvas;
    this.canvas2dContext = canvas.getContext("2d");
  }

  // override: capture the current video frame and return it as an image Blob
  async stop(opts?: TakePhotoOptions) {
    const defaultOpts: TakePhotoOptions = {
      imageType: "image/png",
      quality: 1,
      autoPauseVideo: true,
    };

    const options = Object.assign(defaultOpts, opts);
    if (options.quality < 0 || options.quality > 1) {
      throw new RangeError("quality value must be between 0 and 1");
    }

    return this._draw(options);
  }

  private _draw(options: TakePhotoOptions) {
    const video = this.videoElement!;
    const canvas = this.canvasElement!;
    const { clientWidth, clientHeight, videoWidth, videoHeight } = video;

    // sync video element width and height to canvas
    canvas.width = clientWidth;
    canvas.height = clientHeight;

    const scaleX = clientWidth / videoWidth;
    const scaleY = clientHeight / videoHeight;
    const scale = Math.max(scaleX, scaleY);
    if (clientWidth > videoWidth) {
      // grow
      const scaledWidth = videoWidth * scale;
      const scaledHeight = videoHeight * scale;
      const offsetX = (clientWidth - scaledWidth) / 2;
      const offsetY = (clientHeight - scaledHeight) / 2;
      this.canvas2dContext!.drawImage(
        video,
        0,
        0,
        videoWidth,
        videoHeight,
        offsetX,
        offsetY,
        scaledWidth,
        scaledHeight,
      );
    } else {
      // shrink
      const displayWidth = videoWidth * scale;
      const displayHeight = videoHeight * scale;
      const offsetX = (displayWidth - clientWidth) / 2;
      const offsetY = (displayHeight - clientHeight) / 2;
      this.canvas2dContext!.drawImage(
        video,
        offsetX / scale,
        offsetY / scale,
        clientWidth / scale,
        clientHeight / scale,
        0,
        0,
        clientWidth,
        clientHeight,
      );
    }

    const { imageType, quality, autoPauseVideo } = options;
    return new Promise<Blob>((resolve) => {
      canvas.toBlob(
        (blob) => {
          autoPauseVideo && video.pause();
          resolve(blob as Blob);
        },
        imageType,
        quality,
      );
    });
  }
}

export const createCamera = () => new Camera();
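
A quick sketch of how the photo options can be used, for example to get a compressed JPEG instead of the default PNG. The openCamera/takeJpeg helpers are illustrative, not part of the library code above.

ts
import { createCamera } from "./Camera";

const camera = createCamera();

// call once (e.g. on mount) with a <video> element for the live preview
export async function openCamera(videoEl: HTMLVideoElement) {
  await camera.start();             // requests the camera and prepares the canvas
  camera.initVideoElement(videoEl); // attach the live preview
}

// call later (e.g. from a button click) once the preview is visible
export async function takeJpeg(): Promise<string> {
  const blob = await camera.stop({
    imageType: "image/jpeg",
    quality: 0.8,
    autoPauseVideo: false, // keep the preview running for more shots
  });
  return URL.createObjectURL(blob); // usable directly as an <img> src
}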
ts
import "webrtc-adapter"; // for compatible old browser
export * from "./AudioRecorder";
export * from "./VideoRecorder";
export * from "./Camera";
export * from "./vue-hooks";

3. Wrap it in Vue hooks

ts
import { computed, onMounted, ref, shallowRef } from "vue";
import { AudioRecorder, createAudioRecorder } from "./AudioRecorder";
import { Camera, createCamera } from "./Camera";
import { CameraRecorder, createCameraRecorder, createScreenRecorder, ScreenRecorder } from "./VideoRecorder";

export type RecorderInst = Camera | CameraRecorder | ScreenRecorder | AudioRecorder;
export interface RecorderOptions {
  type: "audio" | "screen" | "camera" | "take-photo";
}

export const recorderFactoryMap = {
  audio: createAudioRecorder,
  screen: createScreenRecorder,
  camera: createCameraRecorder,
  "take-photo": createCamera,
};

// Note: this is for internal use only and doesn't need to be exported
function internalRecorderFactory(options: RecorderOptions) {
  const previewDomRef = ref<HTMLVideoElement>();
  const resultDomRef = ref();
  const resultBlob = ref<Blob | undefined>();
  const resultUrl = computed<string>(() => {
    if (resultBlob.value) {
      return URL.createObjectURL(resultBlob.value);
    }
    return "";
  });

  // create the recorder instance once the component is mounted
  const factory = recorderFactoryMap[options.type];
  const recorderInst = shallowRef<RecorderInst>();
  onMounted(() => {
    recorderInst.value = factory();
  });

  async function start() {
    await recorderInst.value!.start();

    // attach the live preview if a <video> element is bound
    if (previewDomRef.value) {
      (recorderInst.value as ScreenRecorder).initVideoElement(previewDomRef.value);
    }
  }

  async function stop() {
    const blob = await recorderInst.value!.stop();
    resultBlob.value = blob;

    // attach the result to the bound element (<audio>, <video>, ...)
    if (resultDomRef.value) {
      resultDomRef.value.src = resultUrl.value;
    }
  }

  return {
    start,
    stop,
    previewDomRef,
    resultDomRef,
    resultBlob,
    resultUrl,
    recorderInst,
  };
}

export const useAudioRecorder = () => internalRecorderFactory({ type: "audio" });
export const useScreenRecorder = () => internalRecorderFactory({ type: "screen" });
export const useCameraRecorder = () => internalRecorderFactory({ type: "camera" });
export const useCameraTakePhoto = () => internalRecorderFactory({ type: "take-photo" });
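
One thing the hook does not do is revoke the object URLs that resultUrl creates, so each recording's blob stays referenced until the page unloads. If that matters, a small helper along these lines (a sketch, not part of the code above) can be called inside internalRecorderFactory to release the previous URL whenever a new one is produced, and again on unmount:

ts
import { onBeforeUnmount, watch, type ComputedRef } from "vue";

// sketch: revoke stale object URLs produced for resultBlob
export function revokeStaleObjectUrls(resultUrl: ComputedRef<string>) {
  let previous = "";

  watch(resultUrl, (next) => {
    if (previous) URL.revokeObjectURL(previous); // release the old blob URL
    previous = next;
  });

  onBeforeUnmount(() => {
    if (previous) URL.revokeObjectURL(previous); // final cleanup
  });
}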

4. File structure

txt
.
├── README.md
├── index.html
├── package.json
├── pnpm-lock.yaml
├── public
│   └── vite.svg
├── src
│   ├── App.vue
│   ├── AudioRecorder.vue
│   ├── CameraRecorder.vue
│   ├── ScreenRecorder.vue
│   ├── TakePhoto.vue
│   ├── main.ts
│   ├── recorder
│   │   ├── AudioRecorder.ts
│   │   ├── BaseRecorder.ts
│   │   ├── Camera.ts
│   │   ├── VideoRecorder.ts
│   │   ├── index.ts
│   │   └── vue-hooks.ts
│   ├── style.css
│   └── vite-env.d.ts
├── tsconfig.app.json
├── tsconfig.json
├── tsconfig.node.json
└── vite.config.ts

4 directories, 24 files

5. Using the API

Recording audio

vue
<template>
  <!-- audio -->
  <div class="border-b pb-10">
    <div class="py-4">
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="startAudioRecorder">Start recording</button>
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopAudioRecorder">Stop recording</button>
    </div>
    <div class="flex items-center">
      <div class="w-1/2">{{ audioMsg }}</div>
      <audio class="w-1/2" ref="audioRef" controls="true"></audio>
    </div>
  </div>
</template>

<script lang="ts" setup>
import { onMounted, ref } from "vue";
import { createAudioRecorder, AudioRecorder } from "./recorder";

// record from the microphone
const audioRef = ref();
const audioMsg = ref("Microphone not started");

let audioRecorder: AudioRecorder | null = null;
onMounted(() => {
  audioRecorder = createAudioRecorder();
});
async function startAudioRecorder() {
  await audioRecorder!.start();
  audioMsg.value = "Speak now...";
}
async function stopAudioRecorder() {
  const blob = await audioRecorder!.stop();
  const url = URL.createObjectURL(blob!);
  audioMsg.value = "Recording finished";
  audioRef.value.src = url;
  console.log("audio-result", { url, blob });
}
</script>

Recording video

Recording the camera

vue
<template>
  <div class="border-b pb-10">
    <div class="py-4">
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="startCameraRecorder">Start camera recording</button>
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopCameraRecorder">Stop camera recording</button>
    </div>
    <div class="flex items-center">
      <video ref="cameraPreviewRef" class="w-1/2 bg-black"></video>
      <video ref="cameraResultRef" class="w-1/2" controls="true"></video>
    </div>
  </div>
</template>

<script lang="ts" setup>
import { onMounted, ref } from "vue";
import { createCameraRecorder, CameraRecorder } from "./recorder";

// record from the camera
const cameraPreviewRef = ref(); // live preview
const cameraResultRef = ref(); // recording result
let cameraRecorder: null | CameraRecorder = null;
onMounted(() => {
  cameraRecorder = createCameraRecorder();
});

async function startCameraRecorder() {
  // initialize on click, so the permission prompt only appears then
  await cameraRecorder!.start();
  cameraRecorder!.initVideoElement(cameraPreviewRef.value);
}

async function stopCameraRecorder() {
  const blob = await cameraRecorder!.stop();
  const url = URL.createObjectURL(blob!);
  cameraResultRef.value.src = url;
  console.log("stopCameraRecorder blob", blob);
}
</script>

Recording the display (screen)

vue
<template>
  <div class="pb-10">
    <div class="py-4">
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="startScreenRecorder">Start screen recording</button>
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopScreenRecorder">Stop screen recording</button>
    </div>
    <div class="flex items-center">
      <video ref="screenPreviewRef" class="w-1/2 bg-black"></video>
      <video ref="screeeResultRef" class="w-1/2" controls="true"></video>
    </div>
  </div>
</template>

<script lang="ts" setup>
import { onMounted, ref } from "vue";
import { createScreenRecorder, ScreenRecorder } from "./recorder";

// record the screen
const screenPreviewRef = ref(); // live preview
const screenResultRef = ref(); // recording result
let screenRecorder: null | ScreenRecorder = null;
onMounted(() => {
  screenRecorder = createScreenRecorder();
});

async function startScreenRecorder() {
  // initialize on click, so the permission prompt only appears then
  await screenRecorder!.start();
  screenRecorder!.initVideoElement(screenPreviewRef.value);
}

async function stopScreenRecorder() {
  const blob = await screenRecorder!.stop();
  const url = URL.createObjectURL(blob!);
  screenResultRef.value.src = url;
  console.log("stopScreenRecorder blob", blob);
}
</script>

Taking a photo

vue
<template>
  <div class="border-b pb-10">
    <div class="py-4">
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="takePhoto">Start the camera</button>
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopCamera">Take the photo</button>
    </div>

    <div class="flex">
      <!-- note the mirror effect: scale-x-[-1] -->
      <video ref="videoRef" class="w-[480px] h-[640px] border scale-x-[-1]"></video>
      <img v-show="imageUrl" :src="imageUrl" class="ml-4 object-cover border scale-x-[-1]" />
    </div>
  </div>
</template>

<script setup lang="ts">
import { onMounted, ref } from "vue";
import { Camera, createCamera } from "./recorder";

const videoRef = ref();
const imageUrl = ref("");

let camera: null | Camera = null;
onMounted(() => {
  camera = createCamera();
});

async function takePhoto() {
  await camera!.start(); // wait for the stream before binding the preview element
  camera!.initVideoElement(videoRef.value);
}

async function stopCamera() {
  const blob = await camera!.stop();
  const url = URL.createObjectURL(blob!);
  imageUrl.value = url;
}
</script>

6. Using the Vue hooks

Recording audio

vue
<template>
  <!-- audio -->
  <div class="border-b pb-10">
    <div class="py-4">
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="startAudioRecorder">Start recording</button>
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopAudioRecorder">Stop recording</button>
    </div>
    <div class="flex items-center">
      <div class="w-1/2">{{ audioMsg }}</div>
      <audio class="w-1/2" ref="resultDomRef" controls="true"></audio>
    </div>
  </div>
</template>

<script lang="ts" setup>
import { ref } from "vue";
import { useAudioRecorder } from "./recorder";

// record from the microphone
const audioMsg = ref("Microphone not started");

const { start, stop, resultBlob, resultDomRef } = useAudioRecorder();

async function startAudioRecorder() {
  await start();
  audioMsg.value = "Speak now...";
}

async function stopAudioRecorder() {
  await stop();
  audioMsg.value = "Recording finished, click Start recording to record again";
  console.log("resultBlob:", resultBlob);
}
</script>

Recording video

Recording the camera

vue
<template>
  <div class="border-b pb-10">
    <div class="py-4">
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="handleStart">Start camera recording</button>
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="handleStop">Stop camera recording</button>
    </div>
    <div class="flex items-center">
      <video ref="previewDomRef" class="w-1/2 bg-black"></video>
      <video ref="resultDomRef" class="w-1/2" controls="true"></video>
    </div>
  </div>
</template>

<script lang="ts" setup>
import { useCameraRecorder } from "./recorder";

// record from the camera
const { start, stop, previewDomRef, resultDomRef, resultBlob } = useCameraRecorder();

async function handleStart() {
  await start();
}

async function handleStop() {
  await stop();
  console.log("resultBlob:", resultBlob);
}
</script>

Recording the display (screen)

vue
<template>
  <div class="pb-10">
    <div class="py-4">
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="startScreenRecorder">Start screen recording</button>
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopScreenRecorder">Stop screen recording</button>
    </div>
    <div class="flex items-center">
      <video ref="previewDomRef" class="w-1/2 bg-black"></video>
      <video ref="resultDomRef" class="w-1/2" controls="true"></video>
    </div>
  </div>
</template>

<script lang="ts" setup>
import { useScreenRecorder } from "./recorder";

const { start, stop, resultBlob, previewDomRef, resultDomRef } = useScreenRecorder();

// record the screen
async function startScreenRecorder() {
  await start();
}

async function stopScreenRecorder() {
  await stop();
  console.log("resultBlob:", resultBlob);
}
</script>

Taking a photo

vue
<template>
  <div class="border-b pb-10">
    <div class="py-4">
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="takePhoto">Start the camera</button>
      <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopCamera">Take the photo</button>
    </div>

    <div class="flex">
      <!-- note the mirror effect: scale-x-[-1] -->
      <video ref="previewDomRef" class="w-[480px] h-[640px] border scale-x-[-1]"></video>
      <img ref="resultDomRef" class="ml-4 object-cover border" />
    </div>
  </div>
</template>

<script setup lang="ts">
import { useCameraTakePhoto } from "./recorder";

const { start, stop, previewDomRef, resultDomRef, resultBlob } = useCameraTakePhoto();

async function takePhoto() {
  await start();
}

async function stopCamera() {
  await stop();
  console.log("resultBlob:", resultBlob);
}
</script>
