Masui Masanori

[TypeScript] Save MediaStream by MediaRecorder

Intro

I will try saving video and audio with MediaRecorder.
I use the project I created for trying WebRTC.

Base project

Index.cshtml

<!DOCTYPE html>
<html lang="en">
    <head>
        <title>Hello WebRTC</title>
        <meta charset="utf-8">
    </head>
    <body>
...
        <div id="webrtc_sample_area">
...
            <video id="local_video" muted>Video stream not available.</video>
            <video id="received_video" autoplay>Video stream not available.</video>
        </div>
        <div>
            <button onclick="Page.switchFrame()">Frame</button>
            <button onclick="Page.startRecording()">Start</button>
            <button onclick="Page.stopRecording()">Stop</button>            
        </div>
        <canvas id="picture_canvas"></canvas>
        <a id="download_target"></a>
        <script src="js/main.js"></script>
    </body>
</html>

main.page.ts

import { VideoRecorder } from "./video-recorder";
import { WebRtcController } from "./webrtc-controller";
...
let rtcSample = new WebRtcController();
let videoRecorder: VideoRecorder;
...
export function startRecording(): void {
    videoRecorder.startRecording();
}
export function stopRecording(): void {
    videoRecorder.stopRecording();
}
export function switchFrame(): void {
    videoRecorder.updateCanvasSize();
    videoRecorder.switchFrame();
}
...
function init(){
    rtcSample = new WebRtcController();
    rtcSample.initVideo();
    videoRecorder = new VideoRecorder();
}
init();

webrtc-controller.ts

...
export class WebRtcController {
...
  public initVideo(){
      const localVideo = document.getElementById("local_video") as HTMLVideoElement;
      let streaming = false;
      // once the user media stream is available, set the video element's size.
      localVideo.addEventListener("canplay", () => {
          if (streaming === false) {
            const width = 320;
            const height = localVideo.videoHeight / (localVideo.videoWidth/width);          
            localVideo.setAttribute("width", width.toString());
            localVideo.setAttribute("height", height.toString());
            streaming = true;
          }
        }, false);
      navigator.mediaDevices.getUserMedia({ video: true, audio: true })
        .then(stream => {
            this.webcamStream = stream;
            localVideo.srcObject = stream;
            localVideo.play();
        })
        .catch(err => console.error(`An error occurred: ${err}`));
  }
...

Save videos and audios

I can save video and audio as below.

video-recorder.ts

export class VideoRecorder {
    private recorder: MediaRecorder|null = null;
    public startRecording() {
        const localVideo = document.getElementById("local_video") as HTMLVideoElement;
        const localVideoStream = this.getVideoStream(localVideo);
        if(localVideoStream != null) {
            this.recorder = new MediaRecorder(localVideoStream, this.getMimeType());
            this.recorder.ondataavailable = (ev) => this.saveRecordedVideo(ev);
            this.recorder.start();
        }
    }
    public stopRecording() {
        this.recorder?.stop();
    }
    private getVideoStream(targetElement: HTMLVideoElement): MediaStream|null {
        // srcObject can also be a MediaSource or a Blob, so only accept a MediaStream.
        if(targetElement.srcObject != null &&
            "getTracks" in targetElement.srcObject &&
            typeof targetElement.srcObject.getTracks === "function" &&
            "addTrack" in targetElement.srcObject &&
            typeof targetElement.srcObject.addTrack === "function") {
            return targetElement.srcObject;
        }
        return null;
    }
    private saveRecordedVideo(ev: BlobEvent): void {
        if(ev.data.size <= 0) {           
            return;
        }
        const url = URL.createObjectURL(ev.data);
        const downloadTarget = document.getElementById("download_target") as HTMLAnchorElement;
        downloadTarget.download = "sample.webm";
        downloadTarget.href = url;
        downloadTarget.click();
    }
    private getMimeType(): MediaRecorderOptions {
        if(MediaRecorder.isTypeSupported("video/webm; codecs=vp9")) {
            return { mimeType: "video/webm; codecs=vp9" };
        }
        if(MediaRecorder.isTypeSupported("video/webm; codecs=vp8")) {
            return { mimeType: "video/webm; codecs=vp8" };
        }
        return { mimeType: "video/webm" };
    }
}
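One small addition I would make, which is not in the original code: revoke the object URL after the download has been triggered, so the recorded Blob can be released. A minimal sketch of that variation as a standalone helper (the function name is my own), assuming the same download_target anchor element:

function downloadRecordedVideo(data: Blob, fileName: string): void {
    const url = URL.createObjectURL(data);
    const downloadTarget = document.getElementById("download_target") as HTMLAnchorElement;
    downloadTarget.download = fileName;
    downloadTarget.href = url;
    downloadTarget.click();
    // release the Blob reference once the download has started
    setTimeout(() => URL.revokeObjectURL(url), 0);
}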

Codecs

By default, I can only save videos as WebM.
Because Firefox can't handle VP9, I also add VP8 as a fallback MIME type.
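The same fallback logic can also be written as a loop over candidate MIME types in preference order. A minimal sketch as a standalone function (not part of the original class):

function pickSupportedMimeType(): MediaRecorderOptions {
    const candidates = [
        "video/webm; codecs=vp9",
        "video/webm; codecs=vp8",
        "video/webm",
    ];
    for (const mimeType of candidates) {
        if (MediaRecorder.isTypeSupported(mimeType)) {
            return { mimeType };
        }
    }
    // let the browser pick its default container and codec
    return {};
}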

Synthesize MediaStream

For example, I want to add pictures, audios in the video.

Audio

I can't record multiple video tracks or audio tracks from one MediaStream.
MediaRecorder only handles the first track of each kind.

...
    public startRecording() {
        const localVideo = document.getElementById("local_video") as HTMLVideoElement;
        const localVideoStream = this.getVideoStream(localVideo);
        if(localVideoStream != null) {
            // these two calls don't change what gets recorded,
            // because MediaRecorder only uses the first video track and the first audio track.
            localVideoStream.addTrack(someVideoTrack);
            localVideoStream.addTrack(someAudioTrack);

            this.recorder = new MediaRecorder(localVideoStream, this.getMimeType());
            this.recorder.ondataavailable = (ev) => this.saveRecordedVideo(ev);
            this.recorder.start();
        }
    }
...

So I use the Web Audio API to merge multiple audio tracks into one.

video-recorder.ts

export class VideoRecorder {
    private recorder: MediaRecorder|null = null;
    private localVideo: HTMLVideoElement;
    private recording = false;
    private mixedAudioDestinationNode: MediaStreamAudioDestinationNode|null = null;

    public constructor() {
        this.localVideo = document.getElementById("local_video") as HTMLVideoElement;
        this.localVideo.onplay = () => this.init();
    }
    public startRecording() {
        this.recording = true;
        const videoTrack = this.getVideoStream(this.localVideo)?.getVideoTracks()[0];
        if(videoTrack == null) {
            return;
        }
        const newStream = new MediaStream();
        if(this.mixedAudioDestinationNode != null) {
            newStream.addTrack(this.mixedAudioDestinationNode.stream.getAudioTracks()[0]!);
        }
        newStream.addTrack(videoTrack);
        this.recorder = new MediaRecorder(newStream, this.getMimeType());
        this.recorder.start();        
        this.recorder.ondataavailable = (ev) => this.saveRecordedVideo(ev);
    }
    public stopRecording() {
        this.recording = false;
        this.recorder?.stop();
    }
...
    private init(): void {
        const localVideoStream = this.getVideoStream(this.localVideo);
        if(localVideoStream != null) {
            this.createMixedAudio(localVideoStream);
        }
    }
...
    private createMixedAudio(stream: MediaStream): void {
        const audioContext = new AudioContext();
        const audioSourceNode = audioContext.createMediaStreamSource(stream);
        // as a sample effect, delay one channel by one second
        const delay = new DelayNode(audioContext);
        delay.delayTime.value = 1;
        // split the source into its two channels
        const splitter = audioContext.createChannelSplitter(2);
        audioSourceNode.connect(splitter);
        splitter.connect(delay, 1);
        // merge the delayed channel and the original channel again
        const merger = audioContext.createChannelMerger(2);
        delay.connect(merger, 0, 1);
        splitter.connect(merger, 1, 0);
        this.mixedAudioDestinationNode = audioContext.createMediaStreamDestination();
        // output the mixed audio to the speakers and to the destination node for recording
        merger.connect(audioContext.destination);
        merger.connect(this.mixedAudioDestinationNode);
    }
...
}
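The same idea also works for mixing completely separate audio sources, for example the microphone stream and a stream received over WebRTC. A minimal sketch with my own function name, without the delay effect used above:

function mixAudioTracks(streamA: MediaStream, streamB: MediaStream): MediaStreamTrack {
    const audioContext = new AudioContext();
    const destination = audioContext.createMediaStreamDestination();
    // connecting every source to the same destination node mixes them into one track
    audioContext.createMediaStreamSource(streamA).connect(destination);
    audioContext.createMediaStreamSource(streamB).connect(destination);
    return destination.stream.getAudioTracks()[0];
}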

Pictures

I can create a MediaStream from a Canvas element.
Because I can't record multiple video tracks in one MediaStream, I draw the video frames into a Canvas element instead.
After drawing the video and the overlay image into it, I create a MediaStream from the canvas.
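The core API is HTMLCanvasElement.captureStream(). A minimal sketch of just that part, assuming the picture_canvas element from Index.cshtml:

const canvas = document.getElementById("picture_canvas") as HTMLCanvasElement;
// captureStream() returns a live MediaStream whose single video track
// reflects whatever is drawn onto the canvas (here captured at 60 fps)
const canvasStream = canvas.captureStream(60);
console.log(canvasStream.getVideoTracks().length); // 1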

video-recorder.ts

export class VideoRecorder {
    private recorder: MediaRecorder|null = null;
    private localVideo: HTMLVideoElement;
    private recording = false;
    private pictureCanvas: HTMLCanvasElement;
    private frameShown = false;

    private frameImage: HTMLImageElement|null = null;
    private mixedAudioDestinationNode: MediaStreamAudioDestinationNode|null = null;

    public constructor() {
        this.localVideo = document.getElementById("local_video") as HTMLVideoElement;
        this.localVideo.onplay = () => this.init();
        this.pictureCanvas = document.getElementById("picture_canvas") as HTMLCanvasElement;        
        this.pictureCanvas.style.position = "absolute";
    }
    public startRecording() {
        this.recording = true;
        const pictureStream = this.pictureCanvas.captureStream(60);
        const pictureTrack = pictureStream.getVideoTracks()[0];
        if(pictureTrack == null) {
            console.error("No picture video tracks");
            return;
        }
        const newStream = new MediaStream();
        if(this.mixedAudioDestinationNode != null) {
            newStream.addTrack(this.mixedAudioDestinationNode.stream.getAudioTracks()[0]!);
        }
        newStream.addTrack(pictureTrack);
        this.recorder = new MediaRecorder(newStream, this.getMimeType());
        this.recorder.start();
        this.updatePictureCanvas(this.pictureCanvas.getContext("2d") as CanvasRenderingContext2D);

        this.recorder.ondataavailable = (ev) => this.saveRecordedVideo(ev);
    }
    public stopRecording() {
        this.recording = false;
        this.recorder?.stop();
    }
    public updateCanvasSize(): void {
        this.pictureCanvas.width = this.localVideo.videoWidth;
        this.pictureCanvas.height = this.localVideo.videoHeight;
        const rect = this.localVideo.getBoundingClientRect();
        this.pictureCanvas.style.top = `${rect.top}px`;
        this.pictureCanvas.style.left = `${rect.left}px`;
        const ctx = this.pictureCanvas.getContext("2d") as CanvasRenderingContext2D;
        this.frameImage = new Image();
        this.frameImage.onload = () => this.drawFrameImage(ctx);
        this.frameImage.src = "../img/frame.png";
    }
    public switchFrame(): void {
        if(this.frameShown === true) {
            this.pictureCanvas.style.display = "none";
            this.frameShown = false;
        } else {
            this.pictureCanvas.style.display = "block";
            this.frameShown = true;
        }
    }    
    private init(): void {
        const localVideoStream = this.getVideoStream(this.localVideo);
        if(localVideoStream != null) {
            this.createMixedAudio(localVideoStream);
        }
    }
    private getVideoStream(targetElement: HTMLVideoElement): MediaStream|null {
        if(targetElement.srcObject != null &&
            "getTracks" in targetElement.srcObject &&
            typeof targetElement.srcObject.getTracks === "function" &&
            "addTrack" in targetElement.srcObject &&
            typeof targetElement.srcObject.addTrack === "function") {
            return targetElement.srcObject;
        }
        return null;
    }
    private createMixedAudio(stream: MediaStream): void {
        const audioContext = new AudioContext();
        const audioSourceNode = audioContext.createMediaStreamSource(stream);
        const delay = new DelayNode(audioContext);
        delay.delayTime.value = 1;
        const splitter = audioContext.createChannelSplitter(2);
        audioSourceNode.connect(splitter);
        splitter.connect(delay, 1);
        const merger = audioContext.createChannelMerger(2);
        delay.connect(merger, 0, 1);
        splitter.connect(merger, 1, 0);
        this.mixedAudioDestinationNode = audioContext.createMediaStreamDestination();
        merger.connect(audioContext.destination);
        merger.connect(this.mixedAudioDestinationNode);
    }
    private saveRecordedVideo(ev: BlobEvent): void {
        if(ev.data.size <= 0) {
            console.error("No video data");            
            return;
        }
        const url = URL.createObjectURL(ev.data);
        const downloadTarget = document.getElementById("download_target") as HTMLAnchorElement;
        downloadTarget.download = "sample.webm";
        downloadTarget.href = url;
        downloadTarget.click();
    }
    private getMimeType(): MediaRecorderOptions {
        if(MediaRecorder.isTypeSupported("video/webm; codecs=vp9")) {
            return { mimeType: "video/webm; codecs=vp9" };
        }
        if(MediaRecorder.isTypeSupported("video/webm; codecs=vp8")) {
            return { mimeType: "video/webm; codecs=vp8" };
        }
        return { mimeType: "video/webm" };
    }
    private updatePictureCanvas(ctx: CanvasRenderingContext2D) {
        if(this.recording === false) {
            return;
        }
        this.drawFrameImage(ctx);
        // To record the canvas as a video, I have to keep redrawing the images.
        setTimeout(() => this.updatePictureCanvas(ctx), 1000.0 / 60.0);
    }
    private drawFrameImage(ctx: CanvasRenderingContext2D): void {
        if(this.frameImage == null) {
            return;
        }
        ctx.drawImage(this.localVideo, 0, 0, this.localVideo.videoWidth, this.localVideo.videoHeight);
        ctx.drawImage(this.frameImage, 0, 0, this.localVideo.videoWidth, this.localVideo.videoHeight);
    }
}
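As a side note, the redraw loop could also be driven by requestAnimationFrame instead of setTimeout, which ties the redraws to the display's refresh rate. A minimal standalone sketch of that variation (my own function, not the class above):

function drawVideoLoop(video: HTMLVideoElement, canvas: HTMLCanvasElement): void {
    const ctx = canvas.getContext("2d");
    if (ctx == null) {
        return;
    }
    const draw = () => {
        ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
        // keep drawing while the video is playing
        if (!video.paused && !video.ended) {
            requestAnimationFrame(draw);
        }
    };
    requestAnimationFrame(draw);
}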
