Re: [mediacapture-record] MediaRecorder needs to define effect of adding / removing tracks in its input MediaStream

Since this issue is still open, the inquiry appears not to have been resolved.

If a `MediaStream` is active, `MediaRecorder` should record the existing video and audio `MediaStreamTrack`s, instead of recording only the initial track, or recording only portions of tracks subsequently added to the active `MediaStream`.
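
At a minimum, the scenario in question has the following shape (a sketch only: `getUserMedia()` and `captureStream()` stand in here for any producers of `MediaStreamTrack`s, the timeouts only make "later" literal, and the comments describe the behavior argued for above, not current implementations):

```
(async () => {
  // The recorded MediaStream stays active while its tracks are replaced;
  // the question is what MediaRecorder should record.
  const stream = new MediaStream();
  const recorder = new MediaRecorder(stream, {
    mimeType: "video/webm;codecs=vp8,opus"
  });
  recorder.addEventListener("dataavailable", e => {
    // expected result (as argued above): a single Blob containing the media
    // of every track that was live on the stream at any point while recording
    console.log(e.data);
  });

  // initial tracks, e.g. from a camera and microphone
  const first = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  });
  for (const track of first.getTracks()) {
    stream.addTrack(track);
  }
  recorder.start();

  // some time later, swap in tracks captured from a <video> element; the
  // stream never becomes inactive because the new tracks are added before
  // the old ones are removed
  await new Promise(resolve => setTimeout(resolve, 5000));
  const second = document.querySelector("video").captureStream();
  for (const track of second.getTracks()) {
    stream.addTrack(track);
  }
  for (const track of first.getTracks()) {
    stream.removeTrack(track);
  }

  // and later still, end the recording
  await new Promise(resolve => setTimeout(resolve, 5000));
  recorder.stop();
})();
```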

Given the code below:

Chromium

```
<!DOCTYPE html>
<html>

<head>
  <title>Record media fragments to single video using HTMLMediaElement.captureStream(), MediaRecorder, and MediaSource</title>
</head>

<body>
  <video width="320" height="280" autoplay="true"></video>
  <script>
    // https://github.com/guest271314/MediaFragmentRecorder/ 3-11-2018
    // https://github.com/w3c/media-source/issues/190
    const captureStream = mediaElement =>
      !!mediaElement.mozCaptureStream ? mediaElement.mozCaptureStream() : mediaElement.captureStream();
    class MediaFragmentRecorder {
      constructor({
        urls = [],
          video = document.createElement("video"),
          width = 320,
          height = 280
      } = {}) {
        if (urls.length === 0) {
          throw new TypeError("no urls passed to MediaFragmentRecorder");
        }
        return (async() => {
          video.height = height;
          video.width = width;
          video.autoplay = true;
          video.preload = "auto";
          video.controls = true;
          // video.muted = true;
          let videoStream = document.querySelector("video");
          const chunks = [];
          let duration = 0;
          let mediaStream = new MediaStream();
          videoStream.srcObject = mediaStream;
          let recorder;
          mediaStream.addEventListener("inactive", e => {
            console.log(e)
          });
          mediaStream.addEventListener("active", e => {

            console.log(e);
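            // create and start a new MediaRecorder whenever the MediaStream
            // becomes active; each recorder produces its own recording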
            recorder = new MediaRecorder(e.target, {
              mimeType: "video/webm;codecs=vp8,opus"
            });
            recorder.start();
            recorder.addEventListener("dataavailable", e => {
              console.log(e, e.data);
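              // append each resulting Blob as its own <video> element for playback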
              let media = document.createElement("video");
              media.width = width;
              media.height = height;
              media.controls = true;
              document.body.appendChild(media);
              media.src = URL.createObjectURL(e.data);
            });
            recorder.addEventListener("stop", e => {
              console.log(e)
            });

          });
          mediaStream.addEventListener("addtrack", e => {
            console.log(e)
          });
          mediaStream.addEventListener("removetrack", e => {
            console.log(e)
          });
          let media = await Promise.all(
            urls.map(async({
              from,
              to,
              src
            }, index) => {
              const url = new URL(src);
              // get media fragment hash from `src`
              if (url.hash.length) {
                [from, to] = url.hash.match(/\d+/g);
              }
              return {
                blob: await fetch(src).then(response => response.blob()),
                from,
                to
              }
            }));

          for (let {
              from,
              to,
              blob
            }
            of media) {
            await new Promise(async(resolve) => {
              const blobURL = URL.createObjectURL(blob);
              video.addEventListener("playing", e => {

                let stream = captureStream(video);

                let tracks = stream.getTracks();

                // add the freshly captured tracks to the recorded stream, then
                // remove any tracks left over from the previous fragment
                for (let track of tracks) {
                  mediaStream.addTrack(track)
                }

                for (let track of mediaStream.getTracks()) {
                  if (tracks.find(({
                      id
                    }) => track.id === id) === undefined) {
                    mediaStream.removeTrack(track);
                  }
                }

                video.addEventListener("pause", e => {
                  resolve();
                  console.log(video.played.end(0) - video.played.start(0), video.currentTime - from, video.currentTime);
                  duration += video.currentTime - from;
                }, {
                  once: true
                });
              }, {
                once: true
              });
              video.addEventListener("canplay", e => video.play(), {
                once: true
              });
              video.src = `${blobURL}#t=${from},${to}`;
            })
          };
          recorder.stop();
          video.load();
          return {
            chunks,
            duration,
            width,
            height,
            video
          }
        })()
      }
    }
    let urls = [{
      src: "https://upload.wikimedia.org/wikipedia/commons/a/a4/Xacti-AC8EX-Sample_video-001.ogv",
      from: 0,
      to: 4
    }, {
      src: "https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=10,20"
    }, {
      from: 55,
      to: 60,
      src: "https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4"
    }, {
      from: 0,
      to: 5,
      src: "https://raw.githubusercontent.com/w3c/web-platform-tests/master/media-source/mp4/test.mp4"
    }, {
      from: 0,
      to: 5,
      src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerBlazes.mp4"
    }, {
      from: 0,
      to: 5,
      src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerJoyrides.mp4"
    }, {
      src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerMeltdowns.mp4#t=0,6"
    }];
    new MediaFragmentRecorder({
      urls
    })
  </script>
</body>

</html>
```

Firefox 

```
<!DOCTYPE html>
<html>

<head>
  <title>Record media fragments to single video using HTMLMediaElement.captureStream(), MediaRecorder, and MediaSource</title>
</head>

<body>
  <video width="320" height="280" autoplay="true"></video>
  <script>
    // https://github.com/guest271314/MediaFragmentRecorder/ 3-11-2018
    // https://github.com/w3c/media-source/issues/190
    const captureStream = mediaElement =>
      !!mediaElement.mozCaptureStream ? mediaElement.mozCaptureStream() : mediaElement.captureStream();
    class MediaFragmentRecorder {
      constructor({
        urls = [],
          video = document.createElement("video"),
          width = 320,
          height = 280
      } = {}) {
        if (urls.length === 0) {
          throw new TypeError("no urls passed to MediaFragmentRecorder");
        }
        return (async() => {
          video.height = height;
          video.width = width;
          video.autoplay = true;
          video.preload = "auto";
          video.controls = true;
          // video.muted = true;
          let videoStream = document.querySelector("video");
          const chunks = [];
          let duration = 0;
          let mediaStream = new MediaStream();
          videoStream.srcObject = mediaStream;
          let recorder;
          mediaStream.addEventListener("inactive", e => {
            console.log(e)
          });
          mediaStream.addEventListener("active", e => {

            console.log(e);


          });
          mediaStream.addEventListener("addtrack", e => {
            console.log(e)
          });
          mediaStream.addEventListener("removetrack", e => {
            console.log(e)
          });
          let media = await Promise.all(
            urls.map(async({
              from,
              to,
              src
            }, index) => {
              const url = new URL(src);
              // get media fragment hash from `src`
              if (url.hash.length) {
                [from, to] = url.hash.match(/\d+/g);
              }
              return {
                blob: await fetch(src).then(response => response.blob()),
                from,
                to
              }
            }));

          for (let {
              from,
              to,
              blob
            }
            of media) {
            await new Promise(async(resolve) => {
              const blobURL = URL.createObjectURL(blob);
              video.addEventListener("playing", e => {

                let stream = captureStream(video);

                let tracks = stream.getTracks();
                //if (recorder) recorder.pause();
                for (let track of tracks) {
                  mediaStream.addTrack(track)
                }
                // create the MediaRecorder only once, on the first set of
                // captured tracks, and keep recording the same MediaStream
                // as its tracks are replaced
                if (!recorder) {
                  recorder = new MediaRecorder(mediaStream, {
                    mimeType: "video/webm;codecs=vp8,opus"
                  });
                  recorder.addEventListener("start", e => {
                    console.log(e)
                  });
                  recorder.start();
                  recorder.addEventListener("dataavailable", e => {
                    console.log(e, e.data);
                    let media = document.createElement("video");
                    media.width = width;
                    media.height = height;
                    media.controls = true;
                    document.body.appendChild(media);
                    media.src = URL.createObjectURL(e.data);
                  });
                  recorder.addEventListener("stop", e => {
                    console.log(e)
                  });
                }
                for (let track of mediaStream.getTracks()) {
                  if (tracks.find(({
                      id
                    }) => track.id === id) === undefined) {
                    mediaStream.removeTrack(track);
                  }
                }
                // if (recorder) recorder.resume();
                video.addEventListener("pause", e => {
                  resolve();
                  console.log(video.played.end(0) - video.played.start(0), video.currentTime - from, video.currentTime);
                  duration += video.currentTime - from;
                }, {
                  once: true
                });
              }, {
                once: true
              });
              video.addEventListener("canplay", e => video.play(), {
                once: true
              });
              video.src = `${blobURL}#t=${from},${to}`;
            })
          };
          recorder.stop();
          video.load();
          return;
        })()
      }
    }
    let urls = [{
      src: "https://upload.wikimedia.org/wikipedia/commons/a/a4/Xacti-AC8EX-Sample_video-001.ogv",
      from: 0,
      to: 4
    }, {
      src: "https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=10,20"
    }, {
      from: 55,
      to: 60,
      src: "https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4"
    }, {
      from: 0,
      to: 5,
      src: "https://raw.githubusercontent.com/w3c/web-platform-tests/master/media-source/mp4/test.mp4"
    }, {
      from: 0,
      to: 5,
      src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerBlazes.mp4"
    }, {
      from: 0,
      to: 5,
      src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerJoyrides.mp4"
    }, {
      src: "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerMeltdowns.mp4#t=0,6"
    }];
    new MediaFragmentRecorder({
      urls
    })
  </script>
</body>

</html>
```

the `MediaStream` remains active throughout the procedure, yet not all of the `MediaStreamTrack`s are recorded. If it is not possible to record a sequence of `MediaStreamTrack`s, then `MediaRecorder` should at least be able to recognize that a newly added (video or audio) track should be recorded and "seamlessly" concatenated to the media already recorded from the previous tracks, analogous to the `.pause()` and `.resume()` functionality.
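
Expressed with the existing API, that analogy is roughly the following (a sketch only: `previousTracks` and `nextTracks` are placeholder names for the outgoing and incoming tracks, and whether `pause()`/`resume()` around a track swap yields a single, seamlessly concatenated recording is precisely what the specification should define):

```
// Hypothetical handling of a track change, analogous to the commented-out
// pause()/resume() calls in the Firefox example above.
recorder.pause();                          // suspend capture before the swap
for (const track of nextTracks) {          // placeholder: incoming tracks
  mediaStream.addTrack(track);
}
for (const track of previousTracks) {      // placeholder: outgoing tracks
  mediaStream.removeTrack(track);
}
recorder.resume();                         // continue the same recording
```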

-- 
GitHub Notification of comment by guest271314
Please view or discuss this issue at https://github.com/w3c/mediacapture-record/issues/4#issuecomment-373930061 using your GitHub account

Received on Saturday, 17 March 2018 15:47:10 UTC