This commit is contained in:
2023-04-20 12:05:32 +02:00
commit 9975bbb42c
30 changed files with 11480 additions and 0 deletions

23
.gitignore vendored Normal file
View File

@@ -0,0 +1,23 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# production
/build
# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*

46
README.md Normal file
View File

@@ -0,0 +1,46 @@
# Getting Started with Create React App
This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
## Available Scripts
In the project directory, you can run:
### `yarn start`
Runs the app in the development mode.\
Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
The page will reload if you make edits.\
You will also see any lint errors in the console.
### `yarn test`
Launches the test runner in the interactive watch mode.\
See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
### `yarn build`
Builds the app for production to the `build` folder.\
It correctly bundles React in production mode and optimizes the build for the best performance.
The build is minified and the filenames include the hashes.\
Your app is ready to be deployed!
See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
### `yarn eject`
**Note: this is a one-way operation. Once you `eject`, you can't go back!**
If you aren't satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you're on your own.
You don't have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn't feel obligated to use this feature. However we understand that this tool wouldn't be useful if you couldn't customize it when you are ready for it.
## Learn More
You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
To learn React, check out the [React documentation](https://reactjs.org/).

63
archive/test.html Normal file
View File

@@ -0,0 +1,63 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>go2rtc - Stream</title>
<style>
body {
background: black;
margin: 0;
padding: 0;
display: flex;
font-family: Arial, Helvetica, sans-serif;
}
html,
body {
height: 100%;
width: 100%;
}
.flex {
flex-wrap: wrap;
align-content: flex-start;
align-items: flex-start;
}
</style>
</head>
<body>
<script type="module" src="./video-stream.js"></script>
<script type="module">
const params = new URLSearchParams(location.search)
// support multiple streams and multiple modes
const streams = params.getAll('src')
const modes = params.getAll('mode')
if (modes.length === 0) modes.push('')
while (modes.length > streams.length) {
streams.push(streams[0])
}
while (streams.length > modes.length) {
modes.push(modes[0])
}
if (streams.length > 1) {
document.body.className = 'flex'
}
const background = params.get('background') !== 'false'
const width = '1 0 ' + (params.get('width') || '320px')
for (let i = 0; i < streams.length; i++) {
/** @type {VideoStream} */
const video = document.createElement('video-stream')
video.background = background
video.mode = modes[i] || video.mode
video.style.flex = width
video.src = new URL('api/ws?src=' + encodeURIComponent(streams[i]), location.href)
document.body.appendChild(video)
}
</script>
</body>
</html>

597
archive/video-rtc.js Normal file
View File

@@ -0,0 +1,597 @@
/**
* Video player for go2rtc streaming application.
*
* All modern web technologies are supported in almost any browser except Apple Safari.
*
* Support:
* - RTCPeerConnection for Safari iOS 11.0+
* - IntersectionObserver for Safari iOS 12.2+
*
* Doesn't support:
* - MediaSource for Safari iOS all
* - Customized built-in elements (extends HTMLVideoElement) because all Safari
* - Public class fields because old Safari (before 14.0)
* - Autoplay for Safari
*/
export class VideoRTC extends HTMLElement {
constructor() {
super();
this.DISCONNECT_TIMEOUT = 5000;
this.RECONNECT_TIMEOUT = 30000;
this.CODECS = [
"avc1.640029", // H.264 high 4.1 (Chromecast 1st and 2nd Gen)
"avc1.64002A", // H.264 high 4.2 (Chromecast 3rd Gen)
"avc1.640033", // H.264 high 5.1 (Chromecast with Google TV)
"hvc1.1.6.L153.B0", // H.265 main 5.1 (Chromecast Ultra)
"mp4a.40.2", // AAC LC
"mp4a.40.5", // AAC HE
"opus", // OPUS Chrome
];
/**
* [config] Supported modes (webrtc, mse, mp4, mjpeg).
* @type {string}
*/
this.mode = "webrtc,mse,mp4,mjpeg";
/**
* [config] Run stream when not displayed on the screen. Default `false`.
* @type {boolean}
*/
this.background = false;
/**
* [config] Run stream only when player in the viewport. Stop when user scroll out player.
* Value is percentage of visibility from `0` (not visible) to `1` (full visible).
* Default `0` - disable;
* @type {number}
*/
this.visibilityThreshold = 0;
/**
* [config] Run stream only when browser page on the screen. Stop when user change browser
* tab or minimise browser windows.
* @type {boolean}
*/
this.visibilityCheck = true;
/**
* [config] WebRTC configuration
* @type {RTCConfiguration}
*/
this.pcConfig = {
iceServers: [{urls: 'stun:stun.l.google.com:19302'}],
sdpSemantics: 'unified-plan', // important for Chromecast 1
};
/**
* [info] WebSocket connection state. Values: CONNECTING, OPEN, CLOSED
* @type {number}
*/
this.wsState = WebSocket.CLOSED;
/**
* [info] WebRTC connection state.
* @type {number}
*/
this.pcState = WebSocket.CLOSED;
/**
* @type {HTMLVideoElement}
*/
this.video = null;
/**
* @type {WebSocket}
*/
this.ws = null;
/**
* @type {string|URL}
*/
this.wsURL = "";
/**
* @type {RTCPeerConnection}
*/
this.pc = null;
/**
* @type {number}
*/
this.connectTS = 0;
/**
* @type {string}
*/
this.mseCodecs = "";
/**
* [internal] Disconnect TimeoutID.
* @type {number}
*/
this.disconnectTID = 0;
/**
* [internal] Reconnect TimeoutID.
* @type {number}
*/
this.reconnectTID = 0;
/**
* [internal] Handler for receiving Binary from WebSocket.
* @type {Function}
*/
this.ondata = null;
/**
* [internal] Handlers list for receiving JSON from WebSocket
* @type {Object.<string,Function>}}
*/
this.onmessage = null;
}
/**
* Set video source (WebSocket URL). Support relative path.
* @param {string|URL} value
*/
set src(value) {
if (typeof value !== "string") value = value.toString();
if (value.startsWith("http")) {
value = "ws" + value.substring(4);
} else if (value.startsWith("/")) {
value = "ws" + location.origin.substring(4) + value;
}
this.wsURL = value;
this.onconnect();
}
/**
* Play video. Support automute when autoplay blocked.
* https://developer.chrome.com/blog/autoplay/
*/
play() {
this.video.play().catch(er => {
if (er.name === "NotAllowedError" && !this.video.muted) {
this.video.muted = true;
this.video.play().catch(() => console.debug);
}
});
}
/**
* Send message to server via WebSocket
* @param {Object} value
*/
send(value) {
if (this.ws) this.ws.send(JSON.stringify(value));
}
codecs(type) {
const test = type === "mse"
? codec => MediaSource.isTypeSupported(`video/mp4; codecs="${codec}"`)
: codec => this.video.canPlayType(`video/mp4; codecs="${codec}"`);
return this.CODECS.filter(test).join();
}
/**
* `CustomElement`. Invoked each time the custom element is appended into a
* document-connected element.
*/
connectedCallback() {
if (this.disconnectTID) {
clearTimeout(this.disconnectTID);
this.disconnectTID = 0;
}
// because video autopause on disconnected from DOM
if (this.video) {
const seek = this.video.seekable;
if (seek.length > 0) {
this.video.currentTime = seek.end(seek.length - 1);
}
this.play();
} else {
this.oninit();
}
this.onconnect();
}
/**
* `CustomElement`. Invoked each time the custom element is disconnected from the
* document's DOM.
*/
disconnectedCallback() {
if (this.background || this.disconnectTID) return;
if (this.wsState === WebSocket.CLOSED && this.pcState === WebSocket.CLOSED) return;
this.disconnectTID = setTimeout(() => {
if (this.reconnectTID) {
clearTimeout(this.reconnectTID);
this.reconnectTID = 0;
}
this.disconnectTID = 0;
this.ondisconnect();
}, this.DISCONNECT_TIMEOUT);
}
/**
* Creates child DOM elements. Called automatically once on `connectedCallback`.
*/
oninit() {
this.video = document.createElement("video");
this.video.controls = true;
this.video.playsInline = true;
this.video.preload = "auto";
this.video.style.display = "block"; // fix bottom margin 4px
this.video.style.width = "100%";
this.video.style.height = "100%"
this.appendChild(this.video);
if (this.background) return;
if ("hidden" in document && this.visibilityCheck) {
document.addEventListener("visibilitychange", () => {
if (document.hidden) {
this.disconnectedCallback();
} else if (this.isConnected) {
this.connectedCallback();
}
})
}
if ("IntersectionObserver" in window && this.visibilityThreshold) {
const observer = new IntersectionObserver(entries => {
entries.forEach(entry => {
if (!entry.isIntersecting) {
this.disconnectedCallback();
} else if (this.isConnected) {
this.connectedCallback();
}
});
}, {threshold: this.visibilityThreshold});
observer.observe(this);
}
}
/**
* Connect to WebSocket. Called automatically on `connectedCallback`.
* @return {boolean} true if the connection has started.
*/
onconnect() {
if (!this.isConnected || !this.wsURL || this.ws || this.pc) return false;
// CLOSED or CONNECTING => CONNECTING
this.wsState = WebSocket.CONNECTING;
this.connectTS = Date.now();
this.ws = new WebSocket(this.wsURL);
this.ws.binaryType = "arraybuffer";
this.ws.addEventListener("open", ev => this.onopen(ev));
this.ws.addEventListener("close", ev => this.onclose(ev));
return true;
}
ondisconnect() {
this.wsState = WebSocket.CLOSED;
if (this.ws) {
this.ws.close();
this.ws = null;
}
this.pcState = WebSocket.CLOSED;
if (this.pc) {
this.pc.close();
this.pc = null;
}
}
/**
* @returns {Array.<string>} of modes (mse, webrtc, etc.)
*/
onopen() {
// CONNECTING => OPEN
this.wsState = WebSocket.OPEN;
this.ws.addEventListener("message", ev => {
if (typeof ev.data === "string") {
const msg = JSON.parse(ev.data);
for (const mode in this.onmessage) {
this.onmessage[mode](msg);
}
} else {
this.ondata(ev.data);
}
});
this.ondata = null;
this.onmessage = {};
const modes = [];
if (this.mode.indexOf("mse") >= 0 && "MediaSource" in window) { // iPhone
modes.push("mse");
this.onmse();
} else if (this.mode.indexOf("mp4") >= 0) {
modes.push("mp4");
this.onmp4();
}
if (this.mode.indexOf("webrtc") >= 0 && "RTCPeerConnection" in window) { // macOS Desktop app
modes.push("webrtc");
this.onwebrtc();
}
if (this.mode.indexOf("mjpeg") >= 0) {
if (modes.length) {
this.onmessage["mjpeg"] = msg => {
if (msg.type !== "error" || msg.value.indexOf(modes[0]) !== 0) return;
this.onmjpeg();
}
} else {
modes.push("mjpeg");
this.onmjpeg();
}
}
return modes;
}
/**
* @return {boolean} true if reconnection has started.
*/
onclose() {
if (this.wsState === WebSocket.CLOSED) return false;
// CONNECTING, OPEN => CONNECTING
this.wsState = WebSocket.CONNECTING;
this.ws = null;
// reconnect no more than once every X seconds
const delay = Math.max(this.RECONNECT_TIMEOUT - (Date.now() - this.connectTS), 0);
this.reconnectTID = setTimeout(() => {
this.reconnectTID = 0;
this.onconnect();
}, delay);
return true;
}
onmse() {
const ms = new MediaSource();
ms.addEventListener("sourceopen", () => {
URL.revokeObjectURL(this.video.src);
this.send({type: "mse", value: this.codecs("mse")});
}, {once: true});
this.video.src = URL.createObjectURL(ms);
this.video.srcObject = null;
this.play();
this.mseCodecs = "";
this.onmessage["mse"] = msg => {
if (msg.type !== "mse") return;
this.mseCodecs = msg.value;
const sb = ms.addSourceBuffer(msg.value);
sb.mode = "segments"; // segments or sequence
sb.addEventListener("updateend", () => {
if (sb.updating) return;
try {
if (bufLen > 0) {
const data = buf.slice(0, bufLen);
bufLen = 0;
sb.appendBuffer(data);
} else if (sb.buffered && sb.buffered.length) {
const end = sb.buffered.end(sb.buffered.length - 1) - 15;
const start = sb.buffered.start(0);
if (end > start) {
sb.remove(start, end);
ms.setLiveSeekableRange(end, end + 15);
}
// console.debug("VideoRTC.buffered", start, end);
}
} catch (e) {
// console.debug(e);
}
});
const buf = new Uint8Array(2 * 1024 * 1024);
let bufLen = 0;
this.ondata = data => {
if (sb.updating || bufLen > 0) {
const b = new Uint8Array(data);
buf.set(b, bufLen);
bufLen += b.byteLength;
// console.debug("VideoRTC.buffer", b.byteLength, bufLen);
} else {
try {
sb.appendBuffer(data);
} catch (e) {
// console.debug(e);
}
}
}
}
}
onwebrtc() {
const pc = new RTCPeerConnection(this.pcConfig);
/** @type {HTMLVideoElement} */
const video2 = document.createElement("video");
video2.addEventListener("loadeddata", ev => this.onpcvideo(ev), {once: true});
pc.addEventListener("icecandidate", ev => {
const candidate = ev.candidate ? ev.candidate.toJSON().candidate : "";
this.send({type: "webrtc/candidate", value: candidate});
});
pc.addEventListener("track", ev => {
// when stream already init
if (video2.srcObject !== null) return;
// when audio track not exist in Chrome
if (ev.streams.length === 0) return;
// when audio track not exist in Firefox
if (ev.streams[0].id[0] === '{') return;
video2.srcObject = ev.streams[0];
});
pc.addEventListener("connectionstatechange", () => {
if (pc.connectionState === "failed" || pc.connectionState === "disconnected") {
pc.close(); // stop next events
this.pcState = WebSocket.CLOSED;
this.pc = null;
this.onconnect();
}
});
this.onmessage["webrtc"] = msg => {
switch (msg.type) {
case "webrtc/candidate":
pc.addIceCandidate({
candidate: msg.value,
sdpMid: "0"
}).catch(() => console.debug);
break;
case "webrtc/answer":
pc.setRemoteDescription({
type: "answer",
sdp: msg.value
}).catch(() => console.debug);
break;
case "error":
if (msg.value.indexOf("webrtc/offer") < 0) return;
pc.close();
}
};
// Safari doesn't support "offerToReceiveVideo"
pc.addTransceiver("video", {direction: "recvonly"});
pc.addTransceiver("audio", {direction: "recvonly"});
pc.createOffer().then(offer => {
pc.setLocalDescription(offer).then(() => {
this.send({type: "webrtc/offer", value: offer.sdp});
});
});
this.pcState = WebSocket.CONNECTING;
this.pc = pc;
}
/**
* @param ev {Event}
*/
onpcvideo(ev) {
if (!this.pc) return;
/** @type {HTMLVideoElement} */
const video2 = ev.target;
const state = this.pc.connectionState;
// Firefox doesn't support pc.connectionState
if (state === "connected" || state === "connecting" || !state) {
// Video+Audio > Video, H265 > H264, Video > Audio, WebRTC > MSE
let rtcPriority = 0, msePriority = 0;
/** @type {MediaStream} */
const ms = video2.srcObject;
if (ms.getVideoTracks().length > 0) rtcPriority += 0x220;
if (ms.getAudioTracks().length > 0) rtcPriority += 0x102;
if (this.mseCodecs.indexOf("hvc1.") >= 0) msePriority += 0x230;
if (this.mseCodecs.indexOf("avc1.") >= 0) msePriority += 0x210;
if (this.mseCodecs.indexOf("mp4a.") >= 0) msePriority += 0x101;
if (rtcPriority >= msePriority) {
this.video.srcObject = ms;
this.play();
this.pcState = WebSocket.OPEN;
this.wsState = WebSocket.CLOSED;
this.ws.close();
this.ws = null;
} else {
this.pcState = WebSocket.CLOSED;
this.pc.close();
this.pc = null;
}
}
video2.srcObject = null;
}
onmjpeg() {
this.ondata = data => {
this.video.controls = false;
this.video.poster = "data:image/jpeg;base64," + VideoRTC.btoa(data);
};
this.send({type: "mjpeg"});
}
onmp4() {
/** @type {HTMLCanvasElement} **/
const canvas = document.createElement("canvas");
/** @type {CanvasRenderingContext2D} */
let context;
/** @type {HTMLVideoElement} */
const video2 = document.createElement("video");
video2.autoplay = true;
video2.playsInline = true;
video2.muted = true;
video2.addEventListener("loadeddata", ev => {
if (!context) {
canvas.width = video2.videoWidth;
canvas.height = video2.videoHeight;
context = canvas.getContext('2d');
}
context.drawImage(video2, 0, 0, canvas.width, canvas.height);
this.video.controls = false;
this.video.poster = canvas.toDataURL("image/jpeg");
});
this.ondata = data => {
video2.src = "data:video/mp4;base64," + VideoRTC.btoa(data);
};
this.send({type: "mp4", value: this.codecs("mp4")});
}
static btoa(buffer) {
const bytes = new Uint8Array(buffer);
const len = bytes.byteLength;
let binary = "";
for (let i = 0; i < len; i++) {
binary += String.fromCharCode(bytes[i]);
}
return window.btoa(binary);
}
}

98
archive/video-stream.js Normal file
View File

@@ -0,0 +1,98 @@
import {VideoRTC} from "./video-rtc.js";
class VideoStream extends VideoRTC {
    /**
     * Shows the current transport mode ("loading", "RTC", "MSE", ...) in the
     * overlay and clears the status line.
     */
    set divMode(value) {
        this.querySelector(".mode").innerText = value;
        this.querySelector(".status").innerText = "";
    }

    /**
     * Shows an error in the overlay — but only while still "loading", so a late
     * error cannot overwrite an already-playing mode.
     */
    set divError(value) {
        const state = this.querySelector(".mode").innerText;
        if (state !== "loading") return;
        this.querySelector(".mode").innerText = "error";
        this.querySelector(".status").innerText = value;
    }

    /**
     * Custom GUI
     */
    oninit() {
        console.debug("stream.oninit");
        super.oninit();

        // overlay with status (left) and mode (right); pointer-events:none keeps
        // the underlying video controls clickable
        this.innerHTML = `
        <style>
        video-stream {
            position: relative;
        }
        .info {
            position: absolute;
            top: 0;
            left: 0;
            right: 0;
            padding: 12px;
            color: white;
            display: flex;
            justify-content: space-between;
            pointer-events: none;
        }
        </style>
        <div class="info">
            <div class="status"></div>
            <div class="mode"></div>
        </div>
        `;

        // re-attach the <video> created by super.oninit() (innerHTML wiped it)
        const info = this.querySelector(".info")
        this.insertBefore(this.video, info);
    }

    onconnect() {
        console.debug("stream.onconnect");
        const result = super.onconnect();
        if (result) this.divMode = "loading";
        return result;
    }

    ondisconnect() {
        console.debug("stream.ondisconnect");
        super.ondisconnect();
    }

    onopen() {
        console.debug("stream.onopen");
        const result = super.onopen();

        this.onmessage["stream"] = msg => {
            // fix: debug tag was misspelled "stream.onmessge"
            console.debug("stream.onmessage", msg);
            switch (msg.type) {
                case "error":
                    this.divError = msg.value;
                    break;
                case "mse":
                case "mp4":
                case "mjpeg":
                    this.divMode = msg.type.toUpperCase();
                    break;
            }
        }

        return result;
    }

    onclose() {
        console.debug("stream.onclose");
        return super.onclose();
    }

    onpcvideo(ev) {
        console.debug("stream.onpcvideo");
        super.onpcvideo(ev);

        // super decided in favour of WebRTC if pcState is not CLOSED
        if (this.pcState !== WebSocket.CLOSED) {
            this.divMode = "RTC";
        }
    }
}
customElements.define("video-stream", VideoStream);

159
archive/webrtc.html Normal file
View File

@@ -0,0 +1,159 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>go2rtc - WebRTC</title>
<style>
body {
background-color: black;
margin: 0;
padding: 0;
}
html,
body,
video {
height: 100%;
width: 100%;
aspect-ratio: 16 / 9;
object-fit: fill;
}
div {
display: flex;
}
.column {
flex-direction: column;
}
.row {
flex-direction: row;
flex: 1;
}
.videoContainer {
flex: 1;
}
</style>
</head>
<body>
<div class="column">
<div class="row">
<div class="videoContainer">
<video id="video" autoplay controls playsinline muted></video>
</div>
<div class="videoContainer"><video id="video2" autoplay controls playsinline muted></video></div>
<div class="videoContainer"><video id="video3" autoplay controls playsinline muted></video></div>
</div>
<div class="row">
<div class="videoContainer">
<video id="video4" autoplay controls playsinline muted></video>
</div>
<div class="videoContainer"><video id="video5" autoplay controls playsinline muted></video></div>
<div class="videoContainer"><video id="video6" autoplay controls playsinline muted></video></div>
</div>
</div>
<script>
async function PeerConnection(media) {
const pc = new RTCPeerConnection({
iceServers: [{ urls: 'stun:stun.l.google.com:19302' }],
})
const localTracks = []
if (/camera|microphone/.test(media)) {
const tracks = await getMediaTracks('user', {
video: media.indexOf('camera') >= 0,
audio: media.indexOf('microphone') >= 0,
})
tracks.forEach((track) => {
pc.addTransceiver(track, { direction: 'sendonly' })
if (track.kind === 'video') localTracks.push(track)
})
}
if (media.indexOf('display') >= 0) {
const tracks = await getMediaTracks('display', {
video: true,
audio: media.indexOf('speaker') >= 0,
})
tracks.forEach((track) => {
pc.addTransceiver(track, { direction: 'sendonly' })
if (track.kind === 'video') localTracks.push(track)
})
}
if (/video|audio/.test(media)) {
const tracks = ['video', 'audio']
.filter((kind) => media.indexOf(kind) >= 0)
.map((kind) => pc.addTransceiver(kind, { direction: 'recvonly' }).receiver.track)
localTracks.push(...tracks)
}
return { pc, localTracks }
}
async function getMediaTracks(media, constraints) {
try {
const stream =
media === 'user'
? await navigator.mediaDevices.getUserMedia(constraints)
: await navigator.mediaDevices.getDisplayMedia(constraints)
return stream.getTracks()
} catch (e) {
console.warn(e)
return []
}
}
async function connect(url, media, videoId) {
const { pc, localTracks } = await PeerConnection(media)
// const url = new URL('api/ws' + location.search, location.href)
document.getElementById(videoId).srcObject = new MediaStream(localTracks)
const ws = new WebSocket('ws' + url.toString().substring(4))
ws.addEventListener('open', () => {
pc.addEventListener('icecandidate', (ev) => {
if (!ev.candidate) return
const msg = { type: 'webrtc/candidate', value: ev.candidate.candidate }
ws.send(JSON.stringify(msg))
})
pc.createOffer()
.then((offer) => pc.setLocalDescription(offer))
.then(() => {
const msg = { type: 'webrtc/offer', value: pc.localDescription.sdp }
ws.send(JSON.stringify(msg))
})
})
ws.addEventListener('message', (ev) => {
const msg = JSON.parse(ev.data)
if (msg.type === 'webrtc/candidate') {
pc.addIceCandidate({ candidate: msg.value, sdpMid: '0' })
} else if (msg.type === 'webrtc/answer') {
pc.setRemoteDescription({ type: 'answer', sdp: msg.value })
}
})
}
const media = new URLSearchParams(location.search).get('media')
const start = () => {
const url = new URL('http://10.10.0.64:1984/api/ws?src=unifi_einfahrt&media=video')
const url2 = new URL('http://10.10.0.64:1984/api/ws?src=unifi_haustuer&media=video')
const url3 = new URL('http://10.10.0.64:1984/api/ws?src=unifi_garage&media=video')
const url4 = new URL('http://10.10.0.64:1984/api/ws?src=unifi_garten&media=video')
const url5 = new URL('http://10.10.0.64:1984/api/ws?src=unifi_wintergarten&media=video')
connect(url, media || 'video+audio', 'video')
connect(url2, media || 'video+audio', 'video2')
connect(url3, media || 'video+audio', 'video3')
connect(url4, media || 'video+audio', 'video4')
connect(url5, media || 'video+audio', 'video5')
}
start()
</script>
</body>
</html>

48
package.json Normal file
View File

@@ -0,0 +1,48 @@
{
"name": "commandcenter",
"version": "0.1.0",
"private": true,
"dependencies": {
"@heroicons/react": "^2.0.17",
"@testing-library/jest-dom": "^5.14.1",
"@testing-library/react": "^13.0.0",
"@testing-library/user-event": "^13.2.1",
"@types/jest": "^27.0.1",
"@types/node": "^16.7.13",
"@types/react": "^18.0.0",
"@types/react-dom": "^18.0.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-scripts": "5.0.1",
"typescript": "^4.4.2",
"web-vitals": "^2.1.0"
},
"scripts": {
"start": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject",
"deploy": "rsync -ar build/* 10.10.0.64:/home/jens/Docker/commandcenter/html"
},
"eslintConfig": {
"extends": [
"react-app",
"react-app/jest"
]
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
},
"devDependencies": {
"tailwindcss": "^3.3.1"
}
}

BIN
public/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.8 KiB

43
public/index.html Normal file
View File

@@ -0,0 +1,43 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<meta
name="description"
content="Web site created using create-react-app"
/>
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
<!--
manifest.json provides metadata used when your web app is installed on a
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>React App</title>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
</body>
</html>

BIN
public/logo192.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.2 KiB

BIN
public/logo512.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.4 KiB

25
public/manifest.json Normal file
View File

@@ -0,0 +1,25 @@
{
"short_name": "React App",
"name": "Create React App Sample",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
},
{
"src": "logo192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "logo512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

3
public/robots.txt Normal file
View File

@@ -0,0 +1,3 @@
# https://www.robotstxt.org/robotstxt.html
User-agent: *
Disallow:

38
src/App.css Normal file
View File

@@ -0,0 +1,38 @@
.App {
text-align: center;
}
.App-logo {
height: 40vmin;
pointer-events: none;
}
@media (prefers-reduced-motion: no-preference) {
.App-logo {
animation: App-logo-spin infinite 20s linear;
}
}
.App-header {
background-color: #282c34;
min-height: 100vh;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
font-size: calc(10px + 2vmin);
color: white;
}
.App-link {
color: #61dafb;
}
@keyframes App-logo-spin {
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
}

9
src/App.test.tsx Normal file
View File

@@ -0,0 +1,9 @@
import React from 'react';
import { render, screen } from '@testing-library/react';
import App from './App';
test('renders learn react link', () => {
render(<App />);
const linkElement = screen.getByText(/learn react/i);
expect(linkElement).toBeInTheDocument();
});

47
src/App.tsx Normal file
View File

@@ -0,0 +1,47 @@
import { useEffect, useState } from 'react'
import './App.css'
import { Video } from './Video'
import { start } from './lib/start'
import { ArrowPathIcon } from '@heroicons/react/24/solid'
/**
 * Root component: connects to every configured go2rtc stream on mount and
 * renders the resulting MediaStreams in a 3-column grid. Clicking a tile
 * toggles its "focused" (enlarged, unmuted) state; the corner button reloads
 * the page to re-establish all connections.
 */
function App() {
  const [mediaStreams, setMediaStreams] = useState<MediaStream[]>([])
  // index of the focused tile; -1 means no tile is focused
  const [focusedIndex, setFocusedIndex] = useState<number>(-1)

  useEffect(() => {
    start()
      .then((streams) => Promise.all(streams))
      .then((streams) => {
        setMediaStreams(streams)
      })
      // fix: rejected connections were silently dropped (unhandled rejection)
      .catch((e) => console.error('failed to connect streams', e))
  }, [])

  // toggle focus for the clicked tile (unused MouseEvent parameter removed)
  const handleClick = (index: number) => () => {
    setFocusedIndex((current) => (current === index ? -1 : index))
  }

  return (
    <div>
      <div className='grid grid-cols-3'>
        {mediaStreams.map((stream, index) => (
          <div key={`medistream-${index}`} onClick={handleClick(index)}>
            <Video mediaStream={stream} index={index} focused={focusedIndex === index} />
          </div>
        ))}
      </div>
      <button
        className={'absolute bg-white p-2 shadow-xl top-3 right-3 rounded-lg'}
        onClick={() => window.location.reload()}
      >
        <ArrowPathIcon className='h-6 w-6 text-black' />
      </button>
    </div>
  )
}

export default App

39
src/Video.tsx Normal file
View File

@@ -0,0 +1,39 @@
import React, { FC, useEffect, useRef } from 'react'
/**
 * Renders one camera MediaStream. When `focused`, the tile is scaled up with a
 * transform-origin chosen from its position in the 3x3 grid so the enlarged
 * tile stays inside the viewport. Audio is muted unless focused.
 */
export const Video: FC<{ mediaStream: MediaStream; index: number; focused: boolean }> = ({
  mediaStream,
  index,
  focused,
}) => {
  const ref = useRef<HTMLVideoElement>(null)

  // attach the stream to the <video> element once mounted / when it changes
  useEffect(() => {
    if (ref.current) {
      ref.current.srcObject = mediaStream
    }
  }, [mediaStream])

  // Tailwind transform-origin per grid cell, row-major; center cell needs none.
  const originByIndex = [
    'origin-top-left',
    'origin-top',
    'origin-top-right',
    'origin-left',
    '',
    'origin-right',
    'origin-bottom-left',
    'origin-bottom',
    'origin-bottom-right',
  ]
  // fix: indices > 8 previously produced the literal class "undefined"
  const origin = originByIndex[index] ?? ''

  // NOTE(review): 'scale-200' is not a default Tailwind scale step — confirm it
  // is defined in tailwind.config, otherwise the focused tile never enlarges.
  return (
    <div className={`transition ${focused ? 'scale-200' : ''} ${origin}`}>
      <video
        ref={ref}
        autoPlay
        muted={!focused}
        // controls={focused}
        className={'aspect-video object-fill h-full w-full'}
      />
    </div>
  )
}

15
src/index.css Normal file
View File

@@ -0,0 +1,15 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
body {
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', 'Ubuntu', 'Cantarell', 'Fira Sans',
'Droid Sans', 'Helvetica Neue', sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
code {
font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', monospace;
}

19
src/index.tsx Normal file
View File

@@ -0,0 +1,19 @@
import React from 'react';
import ReactDOM from 'react-dom/client';
import './index.css';
import App from './App';
import reportWebVitals from './reportWebVitals';
const root = ReactDOM.createRoot(
document.getElementById('root') as HTMLElement
);
root.render(
<React.StrictMode>
<App />
</React.StrictMode>
);
// If you want to start measuring performance in your app, pass a function
// to log results (for example: reportWebVitals(console.log))
// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
reportWebVitals();

85
src/lib/connect.ts Normal file
View File

@@ -0,0 +1,85 @@
/**
 * Builds an RTCPeerConnection whose transceivers match the requested `media`
 * string (any combination of camera, microphone, display, speaker, video, audio):
 * - camera/microphone: capture local devices, send-only
 * - display(+speaker): capture the screen, send-only
 * - video/audio:       receive remote tracks from the server, recv-only
 *
 * @param media e.g. 'video+audio' or 'camera+microphone'
 * @returns the peer connection and the local tracks to preview/attach
 */
export async function PeerConnection(media: string) {
  const pc = new RTCPeerConnection({
    iceServers: [{ urls: 'stun:stun.l.google.com:19302' }],
  })

  // fix: was an untyped `[]`; give it an explicit element type
  const localTracks: MediaStreamTrack[] = []

  if (/camera|microphone/.test(media)) {
    const tracks = await getMediaTracks('user', {
      video: media.indexOf('camera') >= 0,
      audio: media.indexOf('microphone') >= 0,
    })
    tracks.forEach((track) => {
      pc.addTransceiver(track, { direction: 'sendonly' })
      if (track.kind === 'video') localTracks.push(track)
    })
  }

  if (media.indexOf('display') >= 0) {
    const tracks = await getMediaTracks('display', {
      video: true,
      audio: media.indexOf('speaker') >= 0,
    })
    tracks.forEach((track) => {
      pc.addTransceiver(track, { direction: 'sendonly' })
      if (track.kind === 'video') localTracks.push(track)
    })
  }

  if (/video|audio/.test(media)) {
    // receiver.track exists immediately, before any media has arrived
    const tracks = ['video', 'audio']
      .filter((kind) => media.indexOf(kind) >= 0)
      .map((kind) => pc.addTransceiver(kind, { direction: 'recvonly' }).receiver.track)
    localTracks.push(...tracks)
  }

  return { pc, localTracks }
}
/**
 * Captures media tracks either from local user devices ('user') or via screen
 * sharing (anything else). Failures (permission denied, unsupported) are
 * logged and reported as an empty track list rather than thrown.
 */
export async function getMediaTracks(media: string, constraints?: MediaStreamConstraints) {
  try {
    if (media === 'user') {
      const userStream = await navigator.mediaDevices.getUserMedia(constraints)
      return userStream.getTracks()
    }
    const displayStream = await navigator.mediaDevices.getDisplayMedia(constraints)
    return displayStream.getTracks()
  } catch (e) {
    console.warn(e)
    return []
  }
}
/**
 * Opens a go2rtc WebSocket signalling session for `url`, performs the WebRTC
 * offer/answer + ICE exchange, and returns a MediaStream built from the
 * negotiated local tracks (for recv-only modes these are the remote receivers).
 *
 * @param url   http(s) URL of the go2rtc `api/ws` endpoint
 * @param media media selector, see PeerConnection()
 */
export async function connect(url: string, media: string) {
  const { pc, localTracks } = await PeerConnection(media)
  // 'http...' -> 'ws...', 'https...' -> 'wss...'
  const ws = new WebSocket('ws' + url.toString().substring(4))
  ws.addEventListener('open', () => {
    pc.addEventListener('icecandidate', (ev) => {
      if (!ev.candidate) return
      const msg = { type: 'webrtc/candidate', value: ev.candidate.candidate }
      ws.send(JSON.stringify(msg))
    })
    pc.createOffer()
      .then((offer) => pc.setLocalDescription(offer))
      .then(() => {
        const msg = { type: 'webrtc/offer', value: pc.localDescription?.sdp }
        ws.send(JSON.stringify(msg))
      })
      // fix: negotiation failures previously surfaced as unhandled rejections
      .catch((e) => console.warn(e))
  })
  ws.addEventListener('message', (ev) => {
    const msg = JSON.parse(ev.data)
    if (msg.type === 'webrtc/candidate') {
      // fix: handle rejection of the floating promise
      pc.addIceCandidate({ candidate: msg.value, sdpMid: '0' }).catch((e) => console.warn(e))
    } else if (msg.type === 'webrtc/answer') {
      pc.setRemoteDescription({ type: 'answer', sdp: msg.value }).catch((e) => console.warn(e))
    }
  })
  return new MediaStream(localTracks)
}

15
src/lib/start.ts Normal file
View File

@@ -0,0 +1,15 @@
import { connect } from './connect'
// go2rtc server base URL (must keep the trailing slash).
const baseUrl = 'http://10.10.0.64:1984/'

/**
 * List the streams configured on the go2rtc server and start a WebRTC
 * connection for each of them.
 *
 * @param base server base URL ending with '/'; defaults to the module's baseUrl
 * @returns one pending MediaStream promise per configured stream
 */
export const start = async (base: string = baseUrl) => {
  // FIX: the streams URL was hard-coded to the same host a second time
  // instead of being derived from baseUrl.
  const res = await fetch(`${base}api/streams`)
  if (!res.ok) throw new Error(`Failed to list streams: ${res.status}`)
  // NOTE(review): assumes the response is an object keyed by stream name — verify against go2rtc API.
  const streams: Record<string, unknown> = await res.json()
  return Object.keys(streams).map((streamName) =>
    connect(`${base}api/ws?src=${streamName}`, 'video+audio')
  )
}

637
src/lib/videoRtc.ts Normal file
View File

@@ -0,0 +1,637 @@
/**
* Video player for go2rtc streaming application.
*
* All modern web technologies are supported in almost any browser except Apple Safari.
*
* Support:
* - RTCPeerConnection for Safari iOS 11.0+
* - IntersectionObserver for Safari iOS 12.2+
*
* Doesn't support:
* - MediaSource for Safari iOS all
* - Customized built-in elements (extends HTMLVideoElement) because all Safari
* - Public class fields because old Safari (before 14.0)
* - Autoplay for Safari
*/
/** JSON message exchanged with the go2rtc server over the WebSocket. */
export interface Message {
  // Message kind, e.g. 'webrtc/candidate', 'webrtc/answer', 'mse', 'error'.
  type: string
  // Payload; meaning depends on `type` (SDP text, codec string, error text, …).
  value: string
}
/**
 * Custom element wrapping a <video> that connects to a go2rtc server over a
 * WebSocket and plays the stream via WebRTC, MSE, MP4 posters or MJPEG —
 * whichever mode the browser and server agree on (see `onopen`).
 */
export class VideoRTC extends HTMLElement {
  background: boolean
  CODECS: string[]
  connectTS: number
  DISCONNECT_TIMEOUT: number
  RECONNECT_TIMEOUT: number
  disconnectTID?: NodeJS.Timeout
  reconnectTID?: NodeJS.Timeout
  mode: string
  mseCodecs: string
  ondata: ((eventData: ArrayBufferLike) => void) | null
  onmessage: { [key: string]: (msg: Message) => void }
  pc: RTCPeerConnection | null
  pcConfig: RTCConfiguration & { iceServers: { urls: string }[]; sdpSemantics: string }
  pcState: number
  video: HTMLVideoElement | null
  visibilityCheck: boolean
  visibilityThreshold: number
  ws: WebSocket | null
  wsState: number
  wsURL: string

  constructor() {
    super()
    this.DISCONNECT_TIMEOUT = 5000
    this.RECONNECT_TIMEOUT = 30000
    this.CODECS = [
      'avc1.640029', // H.264 high 4.1 (Chromecast 1st and 2nd Gen)
      'avc1.64002A', // H.264 high 4.2 (Chromecast 3rd Gen)
      'avc1.640033', // H.264 high 5.1 (Chromecast with Google TV)
      'hvc1.1.6.L153.B0', // H.265 main 5.1 (Chromecast Ultra)
      'mp4a.40.2', // AAC LC
      'mp4a.40.5', // AAC HE
      'opus', // OPUS Chrome
    ]
    /**
     * [config] Supported modes (webrtc, mse, mp4, mjpeg).
     * @type {string}
     */
    this.mode = 'webrtc,mse,mp4,mjpeg'
    /**
     * [config] Run stream when not displayed on the screen. Default `false`.
     * @type {boolean}
     */
    this.background = false
    /**
     * [config] Run stream only when player in the viewport. Stop when user scroll out player.
     * Value is percentage of visibility from `0` (not visible) to `1` (full visible).
     * Default `0` - disable;
     * @type {number}
     */
    this.visibilityThreshold = 0
    /**
     * [config] Run stream only when browser page on the screen. Stop when user change browser
     * tab or minimise browser windows.
     * @type {boolean}
     */
    this.visibilityCheck = true
    /**
     * [config] WebRTC configuration
     * @type {RTCConfiguration}
     */
    this.pcConfig = {
      iceServers: [{ urls: 'stun:stun.l.google.com:19302' }],
      sdpSemantics: 'unified-plan', // important for Chromecast 1
    }
    /**
     * [info] WebSocket connection state. Values: CONNECTING, OPEN, CLOSED
     * @type {number}
     */
    this.wsState = WebSocket.CLOSED
    /**
     * [info] WebRTC connection state. Reuses the WebSocket state constants.
     * @type {number}
     */
    this.pcState = WebSocket.CLOSED
    /**
     * @type {HTMLVideoElement}
     */
    this.video = null
    /**
     * @type {WebSocket}
     */
    this.ws = null
    /**
     * @type {string|URL}
     */
    this.wsURL = ''
    /**
     * @type {RTCPeerConnection}
     */
    this.pc = null
    /**
     * [internal] Timestamp of the last connection attempt; used to rate-limit reconnects.
     * @type {number}
     */
    this.connectTS = 0
    /**
     * [internal] Codec string negotiated over MSE; used to rank MSE vs WebRTC in onpcvideo.
     * @type {string}
     */
    this.mseCodecs = ''
    /**
     * [internal] Disconnect TimeoutID.
     * @type {number}
     */
    this.disconnectTID = undefined
    /**
     * [internal] Reconnect TimeoutID.
     * @type {number}
     */
    this.reconnectTID = undefined
    /**
     * [internal] Handler for receiving Binary from WebSocket.
     * @type {Function}
     */
    this.ondata = null
    /**
     * [internal] Handlers list for receiving JSON from WebSocket
     * @type {Object.<string,Function>}}
     */
    this.onmessage = {}
  }

  /**
   * Set video source (WebSocket URL). Support relative path.
   * @param {string|URL} value
   */
  set src(value: string | URL) {
    if (typeof value !== 'string') value = value.toString()
    if (value.startsWith('http')) {
      // 'http…' -> 'ws…', 'https…' -> 'wss…'
      value = 'ws' + value.substring(4)
    }
    this.wsURL = value
    this.onconnect()
  }

  /**
   * Play video. Support automute when autoplay blocked.
   * https://developer.chrome.com/blog/autoplay/
   */
  play() {
    this.video?.play().catch((er) => {
      if (er.name === 'NotAllowedError' && !this.video?.muted) {
        if (this.video) this.video.muted = true
        // NOTE(review): `() => console.debug` is a no-op swallow, nothing is logged.
        this.video?.play().catch(() => console.debug)
      }
    })
  }

  /**
   * Send message to server via WebSocket
   * @param {Object} value
   */
  send(value: string | object) {
    if (this.ws) this.ws.send(JSON.stringify(value))
  }

  /**
   * Join the subset of CODECS this browser can play for the given transport.
   * @param type 'mse' checks MediaSource support, anything else asks the <video> element
   */
  codecs(type: string) {
    const test =
      type === 'mse'
        ? (codec: string) => MediaSource.isTypeSupported(`video/mp4; codecs="${codec}"`)
        : (codec: string) => this.video?.canPlayType(`video/mp4; codecs="${codec}"`)
    return this.CODECS.filter(test).join()
  }

  /**
   * `CustomElement`. Invoked each time the custom element is appended into a
   * document-connected element.
   */
  connectedCallback() {
    // Re-attach cancels a pending disconnect scheduled by disconnectedCallback.
    if (this.disconnectTID) {
      clearTimeout(this.disconnectTID)
      this.disconnectTID = undefined
    }
    // because video autopause on disconnected from DOM
    if (this.video) {
      const seek = this.video.seekable
      if (seek.length > 0) {
        // Jump to the live edge before resuming playback.
        this.video.currentTime = seek.end(seek.length - 1)
      }
      this.play()
    } else {
      this.oninit()
    }
    this.onconnect()
  }

  /**
   * `CustomElement`. Invoked each time the custom element is disconnected from the
   * document's DOM.
   */
  disconnectedCallback() {
    if (this.background || this.disconnectTID) return
    if (this.wsState === WebSocket.CLOSED && this.pcState === WebSocket.CLOSED) return
    // Debounce: only tear down if still detached after DISCONNECT_TIMEOUT.
    this.disconnectTID = setTimeout(() => {
      if (this.reconnectTID) {
        clearTimeout(this.reconnectTID)
        this.reconnectTID = undefined
      }
      this.disconnectTID = undefined
      this.ondisconnect()
    }, this.DISCONNECT_TIMEOUT)
  }

  /**
   * Creates child DOM elements. Called automatically once on `connectedCallback`.
   */
  oninit() {
    this.video = document.createElement('video')
    this.video.controls = true
    this.video.playsInline = true
    this.video.preload = 'auto'
    this.video.style.display = 'block' // fix bottom margin 4px
    this.video.style.width = '100%'
    this.video.style.height = '100%'
    this.appendChild(this.video)
    if (this.background) return
    if ('hidden' in document && this.visibilityCheck) {
      document.addEventListener('visibilitychange', () => {
        if (document.hidden) {
          this.disconnectedCallback()
        } else if (this.isConnected) {
          this.connectedCallback()
        }
      })
    }
    if ('IntersectionObserver' in window && this.visibilityThreshold) {
      const observer = new IntersectionObserver(
        (entries) => {
          entries.forEach((entry) => {
            if (!entry.isIntersecting) {
              this.disconnectedCallback()
            } else if (this.isConnected) {
              this.connectedCallback()
            }
          })
        },
        { threshold: this.visibilityThreshold }
      )
      observer.observe(this)
    }
  }

  /**
   * Connect to WebSocket. Called automatically on `connectedCallback`.
   * @return {boolean} true if the connection has started.
   */
  onconnect() {
    if (!this.isConnected || !this.wsURL || this.ws || this.pc) return false
    // CLOSED or CONNECTING => CONNECTING
    this.wsState = WebSocket.CONNECTING
    this.connectTS = Date.now()
    this.ws = new WebSocket(this.wsURL)
    this.ws.binaryType = 'arraybuffer'
    this.ws.addEventListener('open', (ev) => this.onopen())
    this.ws.addEventListener('close', (ev) => this.onclose())
    return true
  }

  /**
   * Tear down both the WebSocket and the peer connection.
   */
  ondisconnect() {
    this.wsState = WebSocket.CLOSED
    if (this.ws) {
      this.ws.close()
      this.ws = null
    }
    this.pcState = WebSocket.CLOSED
    if (this.pc) {
      this.pc.close()
      this.pc = null
    }
  }

  /**
   * @returns {Array.<string>} of modes (mse, webrtc, etc.)
   */
  onopen() {
    // CONNECTING => OPEN
    this.wsState = WebSocket.OPEN
    this.ws?.addEventListener('message', (ev) => {
      if (typeof ev.data === 'string') {
        const msg = JSON.parse(ev.data)
        for (const mode in this.onmessage) {
          this.onmessage[mode](msg)
        }
      } else {
        // Binary frames go to the active transport's data handler.
        this.ondata?.(ev.data)
      }
    })
    // Reset handlers; each on<mode>() below registers its own.
    this.ondata = null
    this.onmessage = {}
    const modes: string[] = []
    if (this.mode.indexOf('mse') >= 0 && 'MediaSource' in window) {
      // iPhone
      modes.push('mse')
      this.onmse()
    } else if (this.mode.indexOf('mp4') >= 0) {
      modes.push('mp4')
      this.onmp4()
    }
    if (this.mode.indexOf('webrtc') >= 0 && 'RTCPeerConnection' in window) {
      // macOS Desktop app
      modes.push('webrtc')
      this.onwebrtc()
    }
    if (this.mode.indexOf('mjpeg') >= 0) {
      if (modes.length) {
        // MJPEG is only a fallback: kick in when the primary mode errors out.
        this.onmessage['mjpeg'] = (msg) => {
          if (msg.type !== 'error' || msg.value.indexOf(modes[0]) !== 0) return
          this.onmjpeg()
        }
      } else {
        modes.push('mjpeg')
        this.onmjpeg()
      }
    }
    return modes
  }

  /**
   * @return {boolean} true if reconnection has started.
   */
  // @ts-expect-error
  onclose() {
    if (this.wsState === WebSocket.CLOSED) return false
    // CONNECTING, OPEN => CONNECTING
    this.wsState = WebSocket.CONNECTING
    this.ws = null
    // reconnect no more than once every X seconds
    const delay = Math.max(this.RECONNECT_TIMEOUT - (Date.now() - this.connectTS), 0)
    this.reconnectTID = setTimeout(() => {
      this.reconnectTID = undefined
      this.onconnect()
    }, delay)
    return true
  }

  /**
   * Start Media Source Extensions playback: request codecs from the server,
   * then append incoming fMP4 fragments into a SourceBuffer.
   */
  onmse() {
    const ms = new MediaSource()
    ms.addEventListener(
      'sourceopen',
      () => {
        this.video && URL.revokeObjectURL(this.video.src)
        this.send({ type: 'mse', value: this.codecs('mse') })
      },
      { once: true }
    )
    if (this.video) {
      this.video.src = URL.createObjectURL(ms)
      this.video.srcObject = null
    }
    this.play()
    this.mseCodecs = ''
    this.onmessage['mse'] = (msg) => {
      if (msg.type !== 'mse') return
      this.mseCodecs = msg.value
      const sb = ms.addSourceBuffer(msg.value)
      sb.mode = 'segments' // segments or sequence
      sb.addEventListener('updateend', () => {
        if (sb.updating) return
        try {
          if (bufLen > 0) {
            // Flush data queued while the SourceBuffer was busy.
            const data = buf.slice(0, bufLen)
            bufLen = 0
            sb.appendBuffer(data)
          } else if (sb.buffered && sb.buffered.length) {
            // Trim everything older than 15 seconds behind the live edge.
            const end = sb.buffered.end(sb.buffered.length - 1) - 15
            const start = sb.buffered.start(0)
            if (end > start) {
              sb.remove(start, end)
              ms.setLiveSeekableRange(end, end + 15)
            }
            // console.debug("VideoRTC.buffered", start, end);
          }
        } catch (e) {
          // console.debug(e);
        }
      })
      // 2 MiB staging buffer for frames that arrive while sb.updating is true.
      const buf = new Uint8Array(2 * 1024 * 1024)
      let bufLen = 0
      this.ondata = (data) => {
        if (sb.updating || bufLen > 0) {
          const b = new Uint8Array(data)
          buf.set(b, bufLen)
          bufLen += b.byteLength
          // console.debug("VideoRTC.buffer", b.byteLength, bufLen);
        } else {
          try {
            sb.appendBuffer(data)
          } catch (e) {
            // console.debug(e);
          }
        }
      }
    }
  }

  /**
   * Start WebRTC negotiation: create an offer, exchange SDP/candidates over
   * the WebSocket, and hand the remote stream to a probe <video> element
   * (see onpcvideo for the final MSE-vs-WebRTC decision).
   */
  onwebrtc() {
    const pc = new RTCPeerConnection(this.pcConfig)
    /** @type {HTMLVideoElement} */
    const video2 = document.createElement('video')
    video2.addEventListener('loadeddata', (ev) => this.onpcvideo(ev), { once: true })
    pc.addEventListener('icecandidate', (ev) => {
      const candidate = ev.candidate ? ev.candidate.toJSON().candidate : ''
      this.send({ type: 'webrtc/candidate', value: candidate })
    })
    pc.addEventListener('track', (ev) => {
      // when stream already init
      if (video2.srcObject !== null) return
      // when audio track not exist in Chrome
      if (ev.streams.length === 0) return
      // when audio track not exist in Firefox
      if (ev.streams[0].id[0] === '{') return
      video2.srcObject = ev.streams[0]
    })
    pc.addEventListener('connectionstatechange', () => {
      if (pc.connectionState === 'failed' || pc.connectionState === 'disconnected') {
        pc.close() // stop next events
        this.pcState = WebSocket.CLOSED
        this.pc = null
        this.onconnect()
      }
    })
    this.onmessage['webrtc'] = (msg) => {
      switch (msg.type) {
        case 'webrtc/candidate':
          pc.addIceCandidate({
            candidate: msg.value,
            sdpMid: '0',
          }).catch(() => console.debug)
          break
        case 'webrtc/answer':
          pc.setRemoteDescription({
            type: 'answer',
            sdp: msg.value,
          }).catch(() => console.debug)
          break
        case 'error':
          if (msg.value.indexOf('webrtc/offer') < 0) return
          pc.close()
      }
    }
    // Safari doesn't support "offerToReceiveVideo"
    pc.addTransceiver('video', { direction: 'recvonly' })
    pc.addTransceiver('audio', { direction: 'recvonly' })
    pc.createOffer().then((offer) => {
      pc.setLocalDescription(offer).then(() => {
        this.send({ type: 'webrtc/offer', value: offer.sdp })
      })
    })
    this.pcState = WebSocket.CONNECTING
    this.pc = pc
  }

  /**
   * Called once the probe <video> has decoded WebRTC data. Ranks the WebRTC
   * stream against the negotiated MSE codecs and keeps the better transport,
   * closing the loser.
   * @param ev {Event} 'loadeddata' event whose target is the probe video
   */
  onpcvideo(ev: Event) {
    if (!this.pc) return
    /** @type {HTMLVideoElement} */
    const video2 = ev.target as HTMLVideoElement | null
    const state = this.pc.connectionState
    // Firefox doesn't support pc.connectionState
    if (state === 'connected' || state === 'connecting' || !state) {
      // Video+Audio > Video, H265 > H264, Video > Audio, WebRTC > MSE
      let rtcPriority = 0,
        msePriority = 0
      /** @type {MediaStream} */
      const ms = video2?.srcObject as MediaStream | null
      if (ms && ms.getVideoTracks().length > 0) rtcPriority += 0x220
      if (ms && ms.getAudioTracks().length > 0) rtcPriority += 0x102
      if (this.mseCodecs.indexOf('hvc1.') >= 0) msePriority += 0x230
      if (this.mseCodecs.indexOf('avc1.') >= 0) msePriority += 0x210
      if (this.mseCodecs.indexOf('mp4a.') >= 0) msePriority += 0x101
      if (rtcPriority >= msePriority) {
        // WebRTC wins: adopt the stream and drop the WebSocket/MSE path.
        this.video && (this.video.srcObject = ms)
        this.play()
        this.pcState = WebSocket.OPEN
        this.wsState = WebSocket.CLOSED
        this.ws?.close()
        this.ws = null
      } else {
        // MSE wins: drop the peer connection.
        this.pcState = WebSocket.CLOSED
        this.pc.close()
        this.pc = null
      }
    }
    video2?.srcObject && (video2.srcObject = null)
  }

  /**
   * MJPEG fallback: render each received JPEG frame as the video poster.
   */
  onmjpeg() {
    this.ondata = (data) => {
      this.video && (this.video.controls = false)
      this.video && (this.video.poster = 'data:image/jpeg;base64,' + VideoRTC.btoa(data))
    }
    this.send({ type: 'mjpeg' })
  }

  /**
   * MP4 fallback: decode each received MP4 snippet in a hidden <video>,
   * draw it to a canvas and use the canvas as the visible poster frame.
   */
  onmp4() {
    /** @type {HTMLCanvasElement} **/
    const canvas = document.createElement('canvas')
    /** @type {CanvasRenderingContext2D} */
    let context: CanvasRenderingContext2D | null
    /** @type {HTMLVideoElement} */
    const video2 = document.createElement('video')
    video2.autoplay = true
    video2.playsInline = true
    video2.muted = true
    video2.addEventListener('loadeddata', (ev) => {
      if (!context) {
        // Size the canvas once, from the first decoded frame.
        canvas.width = video2.videoWidth
        canvas.height = video2.videoHeight
        context = canvas.getContext('2d')
      }
      context?.drawImage(video2, 0, 0, canvas.width, canvas.height)
      this.video && (this.video.controls = false)
      this.video && (this.video.poster = canvas.toDataURL('image/jpeg'))
    })
    this.ondata = (data) => {
      video2.src = 'data:video/mp4;base64,' + VideoRTC.btoa(data)
    }
    this.send({ type: 'mp4', value: this.codecs('mp4') })
  }

  /**
   * Base64-encode a binary buffer (window.btoa only accepts strings).
   */
  static btoa(buffer: ArrayBufferLike) {
    const bytes = new Uint8Array(buffer)
    const len = bytes.byteLength
    let binary = ''
    for (let i = 0; i < len; i++) {
      binary += String.fromCharCode(bytes[i])
    }
    return window.btoa(binary)
  }
}

59
src/lib/videoStream.ts Normal file
View File

@@ -0,0 +1,59 @@
import { Message, VideoRTC } from './videoRtc.js'
class VideoStream extends VideoRTC {
/**
* Custom GUI
*/
oninit() {
console.debug('stream.oninit')
super.oninit()
}
onconnect() {
console.debug('stream.onconnect')
const result = super.onconnect()
if (result) console.log('loading')
return result
}
ondisconnect() {
console.debug('stream.ondisconnect')
super.ondisconnect()
}
onopen() {
console.debug('stream.onopen')
const result = super.onopen()
this.onmessage['stream'] = (msg: Message) => {
console.debug('stream.onmessge', msg)
switch (msg.type) {
case 'error':
console.log('Error', msg.value)
break
case 'mse':
case 'mp4':
case 'mjpeg':
console.log('Type', msg.type.toUpperCase())
break
}
}
return result
}
onclose = () => {
console.debug('stream.onclose')
return super.onclose()
}
onpcvideo(ev: RTCPeerConnectionIceEvent) {
super.onpcvideo(ev)
if (this.pcState !== WebSocket.CLOSED) {
console.log('RTC')
}
}
}
customElements.define('video-stream', VideoStream)

1
src/logo.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 841.9 595.3"><g fill="#61DAFB"><path d="M666.3 296.5c0-32.5-40.7-63.3-103.1-82.4 14.4-63.6 8-114.2-20.2-130.4-6.5-3.8-14.1-5.6-22.4-5.6v22.3c4.6 0 8.3.9 11.4 2.6 13.6 7.8 19.5 37.5 14.9 75.7-1.1 9.4-2.9 19.3-5.1 29.4-19.6-4.8-41-8.5-63.5-10.9-13.5-18.5-27.5-35.3-41.6-50 32.6-30.3 63.2-46.9 84-46.9V78c-27.5 0-63.5 19.6-99.9 53.6-36.4-33.8-72.4-53.2-99.9-53.2v22.3c20.7 0 51.4 16.5 84 46.6-14 14.7-28 31.4-41.3 49.9-22.6 2.4-44 6.1-63.6 11-2.3-10-4-19.7-5.2-29-4.7-38.2 1.1-67.9 14.6-75.8 3-1.8 6.9-2.6 11.5-2.6V78.5c-8.4 0-16 1.8-22.6 5.6-28.1 16.2-34.4 66.7-19.9 130.1-62.2 19.2-102.7 49.9-102.7 82.3 0 32.5 40.7 63.3 103.1 82.4-14.4 63.6-8 114.2 20.2 130.4 6.5 3.8 14.1 5.6 22.5 5.6 27.5 0 63.5-19.6 99.9-53.6 36.4 33.8 72.4 53.2 99.9 53.2 8.4 0 16-1.8 22.6-5.6 28.1-16.2 34.4-66.7 19.9-130.1 62-19.1 102.5-49.9 102.5-82.3zm-130.2-66.7c-3.7 12.9-8.3 26.2-13.5 39.5-4.1-8-8.4-16-13.1-24-4.6-8-9.5-15.8-14.4-23.4 14.2 2.1 27.9 4.7 41 7.9zm-45.8 106.5c-7.8 13.5-15.8 26.3-24.1 38.2-14.9 1.3-30 2-45.2 2-15.1 0-30.2-.7-45-1.9-8.3-11.9-16.4-24.6-24.2-38-7.6-13.1-14.5-26.4-20.8-39.8 6.2-13.4 13.2-26.8 20.7-39.9 7.8-13.5 15.8-26.3 24.1-38.2 14.9-1.3 30-2 45.2-2 15.1 0 30.2.7 45 1.9 8.3 11.9 16.4 24.6 24.2 38 7.6 13.1 14.5 26.4 20.8 39.8-6.3 13.4-13.2 26.8-20.7 39.9zm32.3-13c5.4 13.4 10 26.8 13.8 39.8-13.1 3.2-26.9 5.9-41.2 8 4.9-7.7 9.8-15.6 14.4-23.7 4.6-8 8.9-16.1 13-24.1zM421.2 430c-9.3-9.6-18.6-20.3-27.8-32 9 .4 18.2.7 27.5.7 9.4 0 18.7-.2 27.8-.7-9 11.7-18.3 22.4-27.5 32zm-74.4-58.9c-14.2-2.1-27.9-4.7-41-7.9 3.7-12.9 8.3-26.2 13.5-39.5 4.1 8 8.4 16 13.1 24 4.7 8 9.5 15.8 14.4 23.4zM420.7 163c9.3 9.6 18.6 20.3 27.8 32-9-.4-18.2-.7-27.5-.7-9.4 0-18.7.2-27.8.7 9-11.7 18.3-22.4 27.5-32zm-74 58.9c-4.9 7.7-9.8 15.6-14.4 23.7-4.6 8-8.9 16-13 24-5.4-13.4-10-26.8-13.8-39.8 13.1-3.1 26.9-5.8 41.2-7.9zm-90.5 125.2c-35.4-15.1-58.3-34.9-58.3-50.6 0-15.7 22.9-35.6 58.3-50.6 8.6-3.7 18-7 27.7-10.1 5.7 19.6 13.2 40 22.5 60.9-9.2 20.8-16.6 
41.1-22.2 60.6-9.9-3.1-19.3-6.5-28-10.2zM310 490c-13.6-7.8-19.5-37.5-14.9-75.7 1.1-9.4 2.9-19.3 5.1-29.4 19.6 4.8 41 8.5 63.5 10.9 13.5 18.5 27.5 35.3 41.6 50-32.6 30.3-63.2 46.9-84 46.9-4.5-.1-8.3-1-11.3-2.7zm237.2-76.2c4.7 38.2-1.1 67.9-14.6 75.8-3 1.8-6.9 2.6-11.5 2.6-20.7 0-51.4-16.5-84-46.6 14-14.7 28-31.4 41.3-49.9 22.6-2.4 44-6.1 63.6-11 2.3 10.1 4.1 19.8 5.2 29.1zm38.5-66.7c-8.6 3.7-18 7-27.7 10.1-5.7-19.6-13.2-40-22.5-60.9 9.2-20.8 16.6-41.1 22.2-60.6 9.9 3.1 19.3 6.5 28.1 10.2 35.4 15.1 58.3 34.9 58.3 50.6-.1 15.7-23 35.6-58.4 50.6zM320.8 78.4z"/><circle cx="420.9" cy="296.5" r="45.7"/><path d="M520.5 78.1z"/></g></svg>

After

Width:  |  Height:  |  Size: 2.6 KiB

1
src/react-app-env.d.ts vendored Normal file
View File

@@ -0,0 +1 @@
/// <reference types="react-scripts" />

15
src/reportWebVitals.ts Normal file
View File

@@ -0,0 +1,15 @@
import { ReportHandler } from 'web-vitals';
/**
 * Forward Core Web Vitals metrics to the given handler
 * (e.g. `reportWebVitals(console.log)`), if one is provided.
 * The web-vitals package is loaded lazily to keep it out of the main bundle.
 */
const reportWebVitals = (onPerfEntry?: ReportHandler) => {
  if (!onPerfEntry || !(onPerfEntry instanceof Function)) return;
  import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => {
    // Same metrics and order as the CRA template.
    [getCLS, getFID, getFCP, getLCP, getTTFB].forEach((report) => report(onPerfEntry));
  });
};
export default reportWebVitals;

5
src/setupTests.ts Normal file
View File

@@ -0,0 +1,5 @@
// jest-dom adds custom jest matchers for asserting on DOM nodes.
// allows you to do things like:
// expect(element).toHaveTextContent(/react/i)
// learn more: https://github.com/testing-library/jest-dom
import '@testing-library/jest-dom';

16
tailwind.config.js Normal file
View File

@@ -0,0 +1,16 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
content: ['./src/**/*.{js,jsx,ts,tsx}'],
theme: {
extend: {
animation: {
wiggle: 'wiggle 1s ease-in-out infinite',
'spin-slow': 'spin 3s linear infinite',
},
scale: {
200: '2.00',
},
},
},
plugins: [],
}

26
tsconfig.json Normal file
View File

@@ -0,0 +1,26 @@
{
"compilerOptions": {
"target": "es5",
"lib": [
"dom",
"dom.iterable",
"esnext"
],
"allowJs": true,
"skipLibCheck": true,
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"noFallthroughCasesInSwitch": true,
"module": "esnext",
"moduleResolution": "node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react-jsx"
},
"include": [
"src"
]
}

9348
yarn.lock Normal file

File diff suppressed because it is too large Load Diff