第6章-媒体流与轨道

这一章主要讲了如何在页面进行轨道的捕获、视频信息捕获及转发、canvas画板信息捕获及转发:

  1. Video.captureStream 捕获视频信息
  2. Canvas.captureStream 捕获Canvas信息

  代码:

  

/*
 *                        .::::.
 *                      .::::::::.
 *                     :::::::::::
 *                  ..:::::::::::'
 *               '::::::::::::'
 *                 .::::::::::
 *            '::::::::::::::..
 *                 ..::::::::::::.
 *               ``::::::::::::::::
 *                ::::``:::::::::'        .:::.
 *               ::::'   ':::::'       .::::::::.
 *             .::::'      ::::     .:::::::'::::.
 *            .:::'       :::::  .:::::::::' ':::::.
 *           .::'        :::::.:::::::::'      ':::::.
 *          .::'         ::::::::::::::'         ``::::.
 *      ...:::           ::::::::::::'              ``::.
 *     ````':.          ':::::::::'                  ::::..
 *                        '.:::::'                    ':'````..
 * @Descripttion: 媒体流与轨道
 * @version: v1.0
 * @Author: 李雯
 * @Date: 2022-01-08 13:54:22
 * @LastEditors: 李雯
 * @LastEditTime: 2022-01-08 14:05:24
 */
import React from "react";
import { Divider, Button, Layout, message, Progress } from "antd";
import SoundMeter from "./common/js/soundmeter";

const { Content } = Layout;

// This component takes no props.
interface IProps {}
interface IState {
  /**
   * @description: Current audio level, rendered as the Progress bar's `percent`.
   * (Was `number | 0` — `0` is already a `number`, so the union was redundant.)
   */
  audioLevel: number;
}

/**
 * @description: 控件集合
 */
/**
 * @description: Collection of element refs (kept at module level, not in state).
 */
const refs = {
  /**
   * @description: Ref to the source <video> element that shows the live camera stream.
   */
  myVideoRef: React.createRef<HTMLVideoElement>(),
  /**
   * @description: Ref to the <video> that plays the captured stream.
   * NOTE(review): "plyar" looks like a typo for "player", but the name is
   * referenced elsewhere in this file, so it is kept for compatibility.
   */
  plyarVideoRef: React.createRef<HTMLVideoElement>(),
};

/**
 * @description: 静态变量
 */
/**
 * @description: Module-level mutable state (none of it drives rendering directly).
 * Types were `null as T` assertions, which lie to the checker and fail under
 * strict mode; they are now honest `T | null` unions. `NodeJS.Timer` was also
 * wrong for browser code — `ReturnType<typeof setInterval>` is portable.
 */
const models = {
  /**
   * @description: Current getUserMedia constraints (request both audio and video).
   */
  defaultConstraints: {
    audio: true,
    video: true,
  },
  /**
   * @description: The media stream opened by getUserMedia (null until opened).
   */
  stream: null as MediaStream | null,
  /**
   * @description: Volume-polling interval handle.
   */
  audioTimer: null as ReturnType<typeof setInterval> | null,
  /**
   * @description: SoundMeter instance used for volume metering.
   */
  soundMeter: null as SoundMeter | null,
  /**
   * @description: Web Audio API context backing the SoundMeter.
   */
  audioContext: null as AudioContext | null,
  /**
   * @description: Frame rate passed to captureStream (0 = capture on each change).
   */
  fps: 0,
};

declare global {
  interface Window {
    /**
     * @description: Prefixed AudioContext constructor for older WebKit browsers.
     * (Was `any`; typed as the AudioContext constructor so
     * `window.AudioContext = window.AudioContext || window.webkitAudioContext`
     * stays type-safe.)
     */
    webkitAudioContext: typeof AudioContext;
  }
  interface HTMLVideoElement {
    /**
     * @description: Captures the element's playback as a MediaStream
     * (Chromium name; was `any`, now a real signature).
     */
    captureStream: (frameRate?: number) => MediaStream;
    /**
     * @description: Firefox-prefixed variant of captureStream.
     */
    mozCaptureStream: (frameRate?: number) => MediaStream;
  }
}

/**
 * @description: 媒体流与轨道
 */
/**
 * @description: Media stream & track demo.
 * Opens the camera/microphone with getUserMedia, plays the live stream in a
 * source <video>, mirrors it into a second <video> via captureStream, meters
 * microphone volume with SoundMeter, and exposes buttons that exercise the
 * MediaStream track APIs.
 */
class MediaStreamingAndTrack extends React.Component<IProps, IState> {
  /**
   * @description: Constructor.
   * @param {IProps} props component props
   */
  constructor(props: IProps) {
    super(props);
    this.state = {
      audioLevel: 0,
    };
  }
  /**
   * @description: Render.
   */
  render() {
    return (
      <div className="container">
        <h1>
          <span>媒体流与轨道</span>
        </h1>
        源视频
        <video className="video" ref={refs.myVideoRef} autoPlay playsInline />
        捕获视频
        <video className="video" ref={refs.plyarVideoRef} autoPlay playsInline />
        音量:
        <Progress
          percent={this.state.audioLevel}
          status="active"
          style={{ width: "300px" }}
        />
        <Content style={{ width: "640px" }}>
          <Divider plain>流与轨道API测试</Divider>
          <Button onClick={this.getAllTrack}>获取所有轨道</Button>
          <Button onClick={this.getAudioTrack}>获取音频轨道</Button>
          <Button onClick={this.getAudioTrackById}>根据ID获取音频轨道</Button>
          <Button onClick={this.deleteAudioTrack}>删除音频轨道</Button>
          <Button onClick={this.getVideoTrack}>获取视频轨道</Button>
          <Button onClick={this.deleteVideoTrack}>删除视频轨道</Button>
        </Content>
      </div>
    );
  }
  /**
   * @description: After mount: open the devices and wire up the capture video.
   */
  componentDidMount() {
    this.openDevice();
    this.canPlay();
  }
  /**
   * @description: Cleanup on unmount (fixes a leak — the original never
   * stopped anything): clear the volume-polling interval and stop every
   * media track so the camera/microphone are released.
   */
  componentWillUnmount() {
    if (models.audioTimer) {
      clearInterval(models.audioTimer);
      models.audioTimer = null;
    }
    if (models.stream) {
      models.stream.getTracks().forEach((track) => track.stop());
      models.stream = null;
    }
  }
  /**
   * @description: Open the audio/video devices via getUserMedia.
   */
  openDevice = () => {
    navigator.mediaDevices
      .getUserMedia(models.defaultConstraints)
      .then(this.openDeviceSuccess)
      .catch(this.openDeviceError);
  };
  /**
   * @description: Device-open success handler: attach the stream to the
   * source <video> and start polling the SoundMeter every 100 ms.
   * @param {MediaStream} e the opened media stream
   */
  openDeviceSuccess = (e: MediaStream): void => {
    const video = refs.myVideoRef.current;
    models.stream = e;
    video.srcObject = e;
    this.volumeDetection();
    models.soundMeter.connectToSource(models.stream);
    if (models.audioTimer) {
      clearInterval(models.audioTimer);
    }
    models.audioTimer = setInterval(() => {
      // Read the instant volume and scale it into a progress-bar percentage.
      // NOTE(review): the 348 factor and +1 offset are empirical — confirm
      // against the SoundMeter value range.
      const instant = parseFloat(models.soundMeter.instant.toFixed(2));
      const val = parseFloat((instant * 348).toFixed(2)) + 1;
      this.setState({ audioLevel: val });
    }, 1000 * 0.1);
  };
  /**
   * @description: Device-open failure handler; shows a readable antd message.
   * @param {any} error the error raised by getUserMedia
   */
  openDeviceError = (error: any): void => {
    let str = "";
    if (error.name === "NotFoundError") {
      str = "没有检查到可用于使用的媒体设备(视频头和麦克风)";
    } else {
      str = "";
    }
    message.error(`getUserMedia错误:${error.name}:${error.message}-${str}`, 2);
  };
  /**
   * @description: Capture the source video's stream (captureStream on
   * Chromium, mozCaptureStream on Firefox) and play it in the second <video>.
   */
  canPlay() {
    const sourceVideo = refs.myVideoRef.current;
    const playVideo = refs.plyarVideoRef.current;
    let stream: MediaStream | null;
    if (sourceVideo.captureStream) {
      stream = sourceVideo.captureStream(models.fps);
    } else if (sourceVideo.mozCaptureStream) {
      stream = sourceVideo.mozCaptureStream(models.fps);
    } else {
      stream = null;
      message.error("当前浏览器不支持captureStream函数", 1.5);
    }
    if (stream) {
      playVideo.srcObject = stream;
    }
  }
  /////////////////////////////////////////////////////////按钮/////////////////////////////////////////////////////////
  /**
   * @description: Log every track (audio + video) of the current stream.
   */
  getAllTrack = (): void => {
    if (!models.stream) return;
    console.log(models.stream.getTracks());
  };
  /**
   * @description: Log all audio tracks of the current stream.
   */
  getAudioTrack = (): void => {
    if (!models.stream) return;
    console.log(models.stream.getAudioTracks());
  };
  /**
   * @description: Look up the first audio track by its id via getTrackById.
   * (Fixed: the original crashed with a TypeError when no audio track was
   * left, e.g. after "删除音频轨道".)
   */
  getAudioTrackById = (): void => {
    if (!models.stream) return;
    const track = models.stream.getAudioTracks()[0];
    if (!track) return; // no audio track left
    console.log(models.stream.getTrackById(track.id));
  };
  /**
   * @description: Remove the first audio track from the stream.
   * (Fixed: guarded against an already-empty track list.)
   */
  deleteAudioTrack = (): void => {
    if (!models.stream) return;
    const track = models.stream.getAudioTracks()[0];
    if (!track) return; // nothing left to remove
    models.stream.removeTrack(track);
    console.log("删除音频轨道");
    this.getAudioTrack();
  };
  /**
   * @description: Log all video tracks of the current stream.
   */
  getVideoTrack = (): void => {
    if (!models.stream) return;
    console.log(models.stream.getVideoTracks());
  };
  /**
   * @description: Remove the first video track from the stream.
   * (The original doc comment wrongly said "audio"; also guarded against an
   * already-empty track list.)
   */
  deleteVideoTrack = (): void => {
    if (!models.stream) return;
    const track = models.stream.getVideoTracks()[0];
    if (!track) return; // nothing left to remove
    models.stream.removeTrack(track);
    console.log("删除视频轨道");
    this.getVideoTrack();
  };
  /////////////////////////////////////////////////////////工具/////////////////////////////////////////////////////////
  /**
   * @description: Prepare the SoundMeter helper, falling back to the
   * prefixed webkitAudioContext constructor on older browsers.
   */
  volumeDetection = (): void => {
    try {
      models.soundMeter = null;
      window.AudioContext = window.AudioContext || window.webkitAudioContext;
      models.audioContext = new window.AudioContext();
      models.soundMeter = new SoundMeter(models.audioContext);
    } catch {
      console.log("网页音频API不支持");
    }
  };
}

export default MediaStreamingAndTrack;
/*
 *                        .::::.
 *                      .::::::::.
 *                     :::::::::::
 *                  ..:::::::::::'
 *               '::::::::::::'
 *                 .::::::::::
 *            '::::::::::::::..
 *                 ..::::::::::::.
 *               ``::::::::::::::::
 *                ::::``:::::::::'        .:::.
 *               ::::'   ':::::'       .::::::::.
 *             .::::'      ::::     .:::::::'::::.
 *            .:::'       :::::  .:::::::::' ':::::.
 *           .::'        :::::.:::::::::'      ':::::.
 *          .::'         ::::::::::::::'         ``::::.
 *      ...:::           ::::::::::::'              ``::.
 *     ````':.          ':::::::::'                  ::::..
 *                        '.:::::'                    ':'````..
 * @Descripttion: 捕获Canvas媒体流
 * @version: v1.0
 * @Author: LiWen
 * @Date: 2022-01-17 19:16:42
 * @LastEditors: LiWen
 * @LastEditTime: 2022-01-17 19:16:42
 */
import React from "react";
import { Layout, Divider } from "antd";
import "../styles/css/capture-canvas.scss";

const { Content } = Layout;

// This component takes no props.
interface IProps {}
interface IState {
  /**
   * @description: Ref to the drawing <canvas> element.
   */
  myCanvasRef: React.RefObject<HTMLCanvasElement> | null;
  /**
   * @description: Ref to the <video> that plays the captured canvas stream.
   */
  myVideoRef: React.RefObject<HTMLVideoElement> | null;
}

/**
 * @description: 控件集合
 */
/**
 * @description: Collection of cached DOM elements.
 */
const refs = {
  /**
   * @description: The drawing <canvas> DOM node, cached after mount.
   * (Was `null as HTMLCanvasElement`, which lies to the checker and fails
   * under strict mode; now an honest nullable type.)
   */
  myCanvasRef: null as HTMLCanvasElement | null,
};

/**
 * @description: 静态变量
 */
/**
 * @description: Module-level mutable state.
 */
const models = {
  /**
   * @description: The canvas's 2D drawing context.
   * (Was `null as CanvasRenderingContext2D`; now an honest nullable type —
   * getContext("2d") can itself return null.)
   */
  context: null as CanvasRenderingContext2D | null,
};

/**
 * @description: 捕获Canvas
 */
/**
 * @description: Canvas-capture demo: a free-hand drawing <canvas> whose
 * content is captured as a 10 fps MediaStream and played back in a <video>.
 * (Class name kept lowercase because the default export references it.)
 */
class captureCanvas extends React.Component<IProps, IState> {
  /**
   * @description: Constructor — the element refs live in component state.
   * @param {IProps} props component props
   */
  constructor(props: IProps) {
    super(props);
    this.state = {
      myCanvasRef: React.createRef(),
      myVideoRef: React.createRef(),
    };
  }
  /**
   * @description: After mount: cache the canvas element and start capturing.
   */
  componentDidMount() {
    this.init();
    this.startCaptureCanvas();
  }
  /**
   * @description: Cleanup on unmount (fixes a leak — the original never
   * detached anything): remove every mouse listener registered on the canvas.
   */
  componentWillUnmount() {
    const canvas = refs.myCanvasRef;
    if (canvas) {
      canvas.removeEventListener("mousedown", this.startAction);
      canvas.removeEventListener("mouseup", this.endAction);
      canvas.removeEventListener("mousemove", this.moveAction);
    }
  }
  /**
   * @description: Cache the canvas DOM node from its ref.
   */
  init() {
    refs.myCanvasRef = this.state.myCanvasRef.current;
  }
  /**
   * @description: Pipe the canvas content into the <video> via captureStream
   * and set up the drawing surface.
   */
  startCaptureCanvas = async () => {
    this.state.myVideoRef.current.srcObject =
      refs.myCanvasRef.captureStream(10); // 10 fps is plenty for a sketch pad
    this.drawLine();
  };
  /**
   * @description: Initialise the 2D context (grey background, 1px red pen)
   * and register the mouse listeners that drive free-hand drawing.
   */
  drawLine = (): void => {
    const context = refs.myCanvasRef.getContext("2d");
    if (!context) return; // getContext("2d") can return null — bail out safely
    models.context = context;
    models.context.fillStyle = "#ccc"; // background fill colour
    models.context.fillRect(0, 0, 640, 480); // paint the canvas background
    models.context.lineWidth = 1; // draw with a 1px-wide stroke
    models.context.strokeStyle = "#FF0000"; // pen colour

    refs.myCanvasRef.addEventListener("mousedown", this.startAction); // begin a stroke
    refs.myCanvasRef.addEventListener("mouseup", this.endAction); // finish a stroke
  };
  /**
   * @description: Mouse-down: open a new path at the pointer position and
   * start tracking mouse movement.
   * @param {MouseEvent} event the mousedown event (was `any`)
   */
  startAction = (event: MouseEvent): void => {
    models.context.beginPath();
    models.context.moveTo(event.offsetX, event.offsetY);
    models.context.stroke();
    refs.myCanvasRef.addEventListener("mousemove", this.moveAction);
  };
  /**
   * @description: Mouse-move: extend the current path to the pointer.
   * @param {MouseEvent} event the mousemove event (was `any`)
   */
  moveAction = (event: MouseEvent): void => {
    models.context.lineTo(event.offsetX, event.offsetY);
    models.context.stroke();
  };
  /**
   * @description: Mouse-up: stop tracking mouse movement.
   */
  endAction = (): void => {
    refs.myCanvasRef.removeEventListener("mousemove", this.moveAction);
  };
  /**
   * @description: Render.
   */
  render() {
    return (
      <div className="container">
        <h1>
          <span>捕获Canvas媒体流</span>
        </h1>
        <Content style={{ width: "640px" }}>
          <Divider plain>画布</Divider>
          <div className="small-canvas">
            <canvas ref={this.state.myCanvasRef} width="640" height="480" />
          </div>
          <Divider plain>视频</Divider>
          <video
            ref={this.state.myVideoRef}
            className="small-video"
            playsInline
            autoPlay
          />
        </Content>
      </div>
    );
  }
}

export default captureCanvas;

备注:

  捕获Canvas媒体流页面,当页面刷新后,canvas媒体流就无法传输到video控件上面,暂时找不到原因,知道原因的大佬,请麻烦告知一下,十分感谢。

  近期因工作需要和个人兴趣,需要查看研究一下vue3和ruoYi,读书计划,暂时中止。

posted on 2022-01-28 10:58  心若随风  阅读(82)  评论(0)    收藏  举报