
Take a Photo

vue
<template>
  <div class="py-4">
    <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="takePhoto">Take Photo</button>
    <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopCamera">Stop Camera</button>
  </div>

  <div class="flex">
    <video ref="videoRef" class="w-[480px] h-[640px] border"></video>
    <img v-show="imageUrl" :src="imageUrl" class="ml-4 object-cover border">
  </div>
</template>

<script setup>
import { createCamera } from "./camera";
import { onMounted, ref } from "vue";

const videoRef = ref(null);
const imageUrl = ref(null);

let camera = null;
onMounted(async () => {
  camera = createCamera(videoRef.value);
  await camera.start();
});

async function takePhoto() {
  const {blob, url} = await camera.takePhoto();
  imageUrl.value = url;
  console.log("url:", url);
  console.log("file:", blob);
}

function stopCamera() {
  camera.stop();
}
</script>
js
const isCallable = (fn) => typeof fn === "function";
const extend = Object.assign;

// Open the camera and take photos
class Camera {
  constructor(videoEle) {
    this.isMediaDevicesSupport();
    this.isStarted = false;
    this.stream = null;
    this.videoElement = null;
    this.canvasElement = null;
    this.canvas2dContext = null;
    this.init(videoEle);
  }

  isMediaDevicesSupport() {
    if (!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)) {
      throw new Error("[Camera]browser not support navigator.mediaDevices API");
    }
  }

  init(videoEle) {
    this._initVideoElement(videoEle);
    this._initCanvasElement();
  }

  _initVideoElement(videoEle) {
    if (!(videoEle instanceof HTMLVideoElement)) {
      throw new Error("[Camera]paramter must be instanceof HTMLVideoElement");
    }
    videoEle.autoplay = true;
    videoEle.muted = true;
    videoEle.playsInline = true;
    videoEle.style.objectFit = "cover";
    videoEle.style.transform = "scaleX(-1)";
    this.videoElement = videoEle;
  }

  _initCanvasElement() {
    const canvas = document.createElement("canvas");
    this.canvasElement = canvas;
    this.canvas2dContext = canvas.getContext("2d");
  }

  start(opts = {}) {
    if (this.isStarted) {
      return;
    }

    const options = extend({
      onError: (err) => {
        console.error("[Camera]fail to start camera:", err);
      },
      onReady: ({ videoElement, stream }) => {
        videoElement.srcObject = stream;
        videoElement.onloadedmetadata = () => videoElement.play();
      },
      onStarted: null,
      onFinal: null,
      videoConstraints: true,
    }, opts);

    const { onReady, onStarted, onError, onFinal, videoConstraints } = options;

    return navigator.mediaDevices.getUserMedia({
      // please visit mdn docs for more details:
      // https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
      video: videoConstraints,
      audio: false,
    })
      .then((stream) => {
        this.stream = stream;
        // only mark the camera as started once getUserMedia actually succeeds
        this.isStarted = true;
        isCallable(onReady) && onReady(this);
        isCallable(onStarted) && onStarted(this);
      })
      .catch((err) => {
        isCallable(onError) && onError(err);
      })
      .finally(() => {
        isCallable(onFinal) && onFinal(this);
      });
  }

  stop() {
    if (!this.isStarted) {
      return;
    }

    this.stream.getTracks().forEach((track) => track.stop());
    this.isStarted = false;
  }

  // Note: since the camera stream's resolution cannot be set arbitrarily,
  // the canvas should not be drawn at the stream's native size.
  // Instead, draw at the size of the <video> element, which can be freely
  // controlled with CSS; with object-fit: cover the stream is cropped to
  // fill the whole element, and the snapshot below reproduces that behaviour.
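  // Worked example with hypothetical numbers: for a 480x640 <video> element and a
  // 1280x720 camera stream, scaleX = 480/1280 = 0.375 and scaleY = 640/720 ≈ 0.889,
  // so scale ≈ 0.889. The code below then crops a 480/0.889 ≈ 540 by 640/0.889 = 720
  // region from the horizontal centre of the frame and draws it into the 480x640
  // canvas, which matches what object-fit: cover shows on screen.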
  takePhoto(opts = {}) {
    if (!this.isStarted) {
      throw new Error("[Camera]please start camera first");
    }

    const video = this.videoElement;
    const canvas = this.canvasElement;
    const {
      clientWidth: containerWidth,
      clientHeight: containerHeight,
      videoWidth,
      videoHeight,
    } = video;

    canvas.width = containerWidth;
    canvas.height = containerHeight;

    const scaleX = containerWidth / videoWidth;
    const scaleY = containerHeight / videoHeight;
    const scale = Math.max(scaleX, scaleY);
    if (containerWidth > videoWidth) {
      // grow
      const scaledWidth = videoWidth * scale;
      const scaledHeight = videoHeight * scale;
      const offsetX = (containerWidth - scaledWidth) / 2;
      const offsetY = (containerHeight - scaledHeight) / 2;
      this.canvas2dContext.drawImage(
        video,
        0,
        0,
        videoWidth,
        videoHeight,
        offsetX,
        offsetY,
        scaledWidth,
        scaledHeight,
      );
    } else {
      // shrink
      const displayWidth = videoWidth * scale;
      const displayHeight = videoHeight * scale;
      const offsetX = (displayWidth - containerWidth) / 2;
      const offsetY = (displayHeight - containerHeight) / 2;
      this.canvas2dContext.drawImage(
        video,
        offsetX / scale,
        offsetY / scale,
        containerWidth / scale,
        containerHeight / scale,
        0,
        0,
        containerWidth,
        containerHeight,
      );
    }

    // canvas.toBlob options:
    // https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob#Syntax
    const options = extend({
      imageType: "image/png",
      quality: 1,
      autoPauseVideo: false,
    }, opts);

    const allowImageTypes = ["image/png", "image/jpeg"];
    if (!allowImageTypes.includes(options.imageType)) {
      throw new Error("[Camera]only support image/png and image/jpeg");
    }
    if (options.quality < 0 || options.quality > 1) {
      throw new RangeError("[Camera]quality must be between 0 and 1");
    }

    const { imageType, quality, autoPauseVideo } = options;
    return new Promise((resolve) => {
      canvas.toBlob(
        (blob) => {
          const url = URL.createObjectURL(blob);
          autoPauseVideo && this.videoElement.pause();
          resolve({ blob, url });
        },
        imageType,
        quality,
      );
    });
  }
}

export const createCamera = (videoEle) => new Camera(videoEle);
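
For reference, here is a minimal sketch of driving the class directly, outside a Vue component, with non-default start and takePhoto options. The option names are the ones defined above; the element selector and constraint values are illustrative assumptions.

js
import { createCamera } from "./camera";

// Assumption: the page contains a <video id="preview"> element.
const camera = createCamera(document.querySelector("#preview"));

await camera.start({
  // forwarded as-is to getUserMedia({ video: ... })
  videoConstraints: { facingMode: "user", width: { ideal: 1280 } },
  onStarted: () => console.log("camera is running"),
});

// Take a JPEG snapshot and pause the live preview afterwards.
const { blob, url } = await camera.takePhoto({
  imageType: "image/jpeg",
  quality: 0.9,
  autoPauseVideo: true,
});
console.log("photo:", blob, url);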

Record the Microphone / Camera / Screen

vue
<template>
  <div class="container mx-auto overflow-x-auto">
    <div class="py-10">
      <h2 class="text-center text-lg">Record the microphone / camera / screen with the navigator.mediaDevices Web API</h2>
      <p class="text-sm text-center">It can also be used to check whether your microphone / camera hardware works properly, e.g. static noise on the mic or a blurry camera image</p>
    </div>

    <!-- audio -->
    <div class="border-b pb-10">
      <div class="py-4">
        <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="startAudioRecorder">Start Audio Recording</button>
        <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopAudioRecorder">Stop Audio Recording</button>
      </div>
      <div class="flex items-center">
        <div class="w-1/2">{{ audioMsg }}</div>
        <audio class="w-1/2" ref="audioRef" controls="true"></audio>
      </div>
    </div>

    <!-- camera -->
    <div class="border-b pb-10">
      <div class="py-4">
        <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="startCameraRecorder">Start Camera Recording</button>
        <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopCameraRecorder">Stop Camera Recording</button>
      </div>
      <div class="flex items-center">
        <video ref="cameraRef" class="w-1/2"></video>
        <video ref="cameraVideoRef" class="w-1/2" controls="true"></video>
      </div>
    </div>

    <!-- screen -->
    <div class="pb-10">
      <div class="py-4">
        <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="startScreenRecorder">Start Screen Recording</button>
        <button class="px-4 py-2 mr-2 bg-blue-500 text-white" @click="stopScreenRecorder">Stop Screen Recording</button>
      </div>
      <div class="flex items-center">
        <video ref="screenRef" class="w-1/2"></video>
        <video ref="screenVideoRef" class="w-1/2" controls="true"></video>
      </div>
    </div>
  </div>
</template>

<script setup>
import { onMounted, ref } from "vue";
import { createAudioRecorder, createCameraRecorder, createScreenRecorder } from "./recorder.js";

// Record the microphone
const audioRef = ref(null);
const audioMsg = ref("Microphone not started");

let audioRecorder = null;
onMounted(async () => {
  audioRecorder = createAudioRecorder();
  await audioRecorder.init();
});
async function startAudioRecorder() {
  await audioRecorder.start();
  audioMsg.value = "请说话...";
}
async function stopAudioRecorder() {
  const { url, blob } = await audioRecorder.stop();
  audioMsg.value = "录音已结束";
  audioRef.value.src = url;
  console.log("stopAudioRecorder blob", blob);
}

// Record the camera
const cameraRef = ref(null);
const cameraVideoRef = ref(null);
let cameraRecorder = null;
onMounted(async () => {
  // Initialize right away on mount
  cameraRecorder = createCameraRecorder();
  await cameraRecorder.init(cameraRef.value);
});

async function startCameraRecorder() {
  await cameraRecorder.start();
}
async function stopCameraRecorder() {
  const { url, blob } = await cameraRecorder.stop();
  cameraVideoRef.value.src = url;
  console.log("stopCameraRecorder blob", blob);
}

// Record the screen
const screenRef = ref(null);
const screenVideoRef = ref(null);
let screenRecorder = null;
onMounted(() => {
  screenRecorder = createScreenRecorder();
});

async function startScreenRecorder() {
  // Initialize on click: getDisplayMedia must be called from a user gesture
  await screenRecorder.init(screenRef.value);
  await screenRecorder.start();
}

async function stopScreenRecorder() {
  const { url, blob } = await screenRecorder.stop();
  screenVideoRef.value.src = url;
  console.log("stopScreenRecorder blob", blob);
}
</script>
js
import RecordRTC, { RecordRTCPromisesHandler } from "recordrtc";

// Base class
export class BaseRecorder {
  constructor() {
    this._isMediaDeviceSupport();
    this.isInitialized = false;
    this.stream = null;
    this.rtcRecorder = null;
  }

  async init() {
    // must be implemented
    return Promise.reject(new Error("[BaseRecorder]Not implemented"));
  }

  _isInitialized() {
    if (!this.isInitialized) {
      throw new Error("[BaseRecorder]Recorder is not initialized");
    }
  }

  _isMediaDeviceSupport() {
    if (!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)) {
      throw new Error("[BaseRecorder]browser not support navigator.mediaDevices API");
    }
  }

  async start() {
    this._isInitialized();
    return this.rtcRecorder.startRecording();
  }

  async stop() {
    this._isInitialized();
    await this.rtcRecorder.stopRecording();

    const blob = await this.rtcRecorder.getBlob();
    const url = URL.createObjectURL(blob);

    this.stream.getTracks().forEach(track => track.stop());
    await this.rtcRecorder.reset();
    await this.rtcRecorder.destroy();

    return {
      blob,
      url,
    };
  }
}

// Record audio (microphone)
class AudioRecorder extends BaseRecorder {
  async init() {
    this.stream = await navigator.mediaDevices.getUserMedia({
      audio: true,
    });
    this.rtcRecorder = new RecordRTCPromisesHandler(this.stream, {
      type: "audio",
      recorderType: RecordRTC.StereoAudioRecorder,
    });
    this.isInitialized = true;
  }
}

// Abstract base class for video recording.
// To customize the recording (e.g. whether to capture audio, the video size, etc.),
// create a subclass and implement the initStream method (see the sketch after this listing).
// The MDN docs have more details:
// https://developer.mozilla.org/zh-CN/docs/Web/API/MediaDevices/getUserMedia
export class VideoRecorderAbstract extends BaseRecorder {
  videoElement = null;

  async initStream() {
    throw new Error("[VideoRecorderAbstruct]Not implemented");
  }

  async init(el) {
    this.stream = await this.initStream();
    this.rtcRecorder = new RecordRTCPromisesHandler(this.stream, {
      type: "video",
      recorderType: RecordRTC.MediaStreamRecorder,
    });
    this.initVideoElement(el);
    this.isInitialized = true;
  }

  initVideoElement(videoEl) {
    if (!(videoEl instanceof HTMLVideoElement)) {
      throw new Error("[Camera]paramter must be instanceof HTMLVideoElement");
    }
    videoEl.autoplay = true;
    videoEl.muted = true;
    videoEl.playsInline = true;
    videoEl.style.objectFit = "cover";
    videoEl.onloadedmetadata = () => videoEl.play();
    videoEl.srcObject = this.stream;
    this.videoElement = videoEl;
  }
}

// Record the camera (video)
class CameraRecorder extends VideoRecorderAbstract {
  async initStream() {
    return navigator.mediaDevices.getUserMedia({
      audio: true,
      video: true,
    });
  }
}

// Record the display/screen (video)
export class ScreenRecorder extends VideoRecorderAbstract {
  async initStream() {
    return await navigator.mediaDevices.getDisplayMedia({
      audio: true,
      video: true,
    });
  }
}

export const createAudioRecorder = () => new AudioRecorder();
export const createCameraRecorder = () => new CameraRecorder();
export const createScreenRecorder = () => new ScreenRecorder();
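
As noted on VideoRecorderAbstract, custom recording parameters only require a subclass that implements initStream. Below is a minimal sketch, assuming you want a roughly 1080p camera recording without audio; the class name and constraint values are made up for illustration.

js
import { VideoRecorderAbstract } from "./recorder.js";

// Hypothetical subclass: record the camera at ~1080p, video only.
class HdCameraRecorder extends VideoRecorderAbstract {
  async initStream() {
    return navigator.mediaDevices.getUserMedia({
      audio: false,
      video: { width: { ideal: 1920 }, height: { ideal: 1080 } },
    });
  }
}

export const createHdCameraRecorder = () => new HdCameraRecorder();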

Source code & live preview
