Synchronization of data with video using WebRTC
I suspect the amount of data per frame is fairly small. I would look at encoding it into a 2D barcode image and placing it in each frame in a way that is not removed by compression. Alternatively, you could simply encode a timestamp into the frame the same way.
Then, on the player side, you look at the image in a particular frame and extract the data out of it.
OK, first let's capture the video and audio using getUserMedia, and turn it into raw data using MediaStreamRecorder:
/*
 * Video Streamer
 *
 * Requires MediaStreamRecorder.js, loaded separately in the page:
 *   <script src="https://cdn.webrtc-experiment.com/MediaStreamRecorder.js"></script>
 *
 * NOTE(review): the original used the deprecated callback-based
 * navigator.getUserMedia, which has been removed from modern browsers;
 * this version uses the promise-based navigator.mediaDevices.getUserMedia.
 * Each browser variant is wrapped in its own IIFE so the duplicated
 * declarations (mediaConstraints / onMediaSuccess / onMediaError) do not
 * collide when both snippets live in the same script.
 */

// FIREFOX
(function () {
  var mediaConstraints = {
    audio: !!navigator.mozGetUserMedia, // don't forget audio!
    video: true                         // don't forget video!
  };

  function onMediaSuccess(stream) {
    var mediaRecorder = new MediaStreamRecorder(stream);
    mediaRecorder.mimeType = 'video/webm';
    mediaRecorder.ondataavailable = function (blob) {
      // POST/PUT "Blob" using FormData/XHR2
    };
    mediaRecorder.start(3000); // emit one blob every 3 seconds
  }

  function onMediaError(e) {
    console.error('media error', e);
  }

  navigator.mediaDevices.getUserMedia(mediaConstraints)
    .then(onMediaSuccess)
    .catch(onMediaError);
})();

// CHROME
(function () {
  var mediaConstraints = {
    audio: true,
    video: true
  };

  function onMediaSuccess(stream) {
    var multiStreamRecorder = new MultiStreamRecorder(stream);
    multiStreamRecorder.video = yourVideoElement; // to get maximum accuracy
    multiStreamRecorder.audioChannels = 1;
    multiStreamRecorder.ondataavailable = function (blobs) {
      // blobs.audio
      // blobs.video
    };
    multiStreamRecorder.start(3000); // emit audio/video blobs every 3 seconds
  }

  function onMediaError(e) {
    console.error('media error', e);
  }

  navigator.mediaDevices.getUserMedia(mediaConstraints)
    .then(onMediaSuccess)
    .catch(onMediaError);
})();
Now you can send the data through DataChannels and attach your metadata. On the receiver side:
/*
 * Video Receiver
 *
 * Feeds chunks received over the DataChannel into a <video> element via the
 * Media Source Extensions (MSE) API.
 */
var RECEIVER_MIME = 'video/webm; codecs="vorbis,vp8"';

var ms = new MediaSource();
var video = document.querySelector('video');
video.src = window.URL.createObjectURL(ms);

ms.addEventListener('sourceopen', function (e) {
  // Guard against browsers that cannot play this container/codec combo —
  // addSourceBuffer throws NotSupportedError otherwise.
  if (!MediaSource.isTypeSupported(RECEIVER_MIME)) {
    console.error('media error', 'unsupported MIME type: ' + RECEIVER_MIME);
    return;
  }
  var sourceBuffer = ms.addSourceBuffer(RECEIVER_MIME);
  sourceBuffer.appendBuffer(/* Video chunks here */);
}, false);