Skip to content

Commit

Permalink
fix: frame offloading
Browse files Browse the repository at this point in the history
  • Loading branch information
marcus-pousette committed Feb 19, 2025
1 parent 7b5c4b8 commit 071bf6d
Show file tree
Hide file tree
Showing 6 changed files with 247 additions and 127 deletions.
12 changes: 12 additions & 0 deletions packages/live-streaming/frontend/src/index.css
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,18 @@ code {
pointer-events: none;
}

/* Full-size, click-through overlay that centers its children over the
   element it is absolutely positioned against (used for the
   "Waiting for encoder" notice over the video). pointer-events: none
   lets clicks fall through to the video/controls underneath. */
.center-middle {
    display: flex;
    align-items: center;
    justify-content: space-evenly;
    position: absolute;
    pointer-events: none;
    width: 100%;
    height: 100%;
    margin: 0;
    bottom: 0;
}


.slider-root {
position: relative;
Expand Down
224 changes: 136 additions & 88 deletions packages/live-streaming/frontend/src/media/streamer/Renderer.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ import pDefer from "p-defer";
import { isSafari } from "../utils";
import { convertGPUFrameToCPUFrame } from "./convertGPUFrameToCPUFrame";
import { Tracks } from "../controls/Tracks.js";
import { Spinner } from "../../utils/Spinner.js";

interface HTMLVideoElementWithCaptureStream extends HTMLVideoElement {
captureStream(fps?: number): MediaStream;
Expand Down Expand Up @@ -576,6 +577,8 @@ export const Renderer = (args: { stream: MediaStreamDB }) => {

const wavEncoder = useRef(new WAVEncoder());
const loopCounter = useRef(0);
const frameEncodingQueue = useRef(new PQueue({ concurrency: 1 }));
const [waitingForEncoder, setWaitingForEncoder] = useState(false);

useEffect(() => {
const clickListener = () => {
Expand Down Expand Up @@ -682,6 +685,9 @@ export const Renderer = (args: { stream: MediaStreamDB }) => {
};

const dropAll = async () => {
frameEncodingQueue.current.clear();
setWaitingForEncoder(false);

        // we should re-render everything, so let's just drop all dbs and pretend we are fresh
if (videoEncoders.current) {
for (const encoder of videoEncoders.current) {
Expand Down Expand Up @@ -948,6 +954,7 @@ export const Renderer = (args: { stream: MediaStreamDB }) => {
let framesSinceLastBackground = 0;
let lastFrameTimestamp = -1;
let firstFrameHighresTimestamp: undefined | number = undefined;
const maxEncoderQueueSize = 30;
const requestFrame = () => {
if (!inBackground && "requestVideoFrameCallback" in videoRef) {
videoRef.requestVideoFrameCallback(frameFn);
Expand Down Expand Up @@ -1002,108 +1009,142 @@ export const Renderer = (args: { stream: MediaStreamDB }) => {
let frame = new VideoFrame(videoRef, {
timestamp,
});

/* console.log("Render frame: ", {
timestamp,
frameTimestamp: frame.timestamp,
observedMediaTime,
firstFrameHighresTimestamp,
}); */
if (preferCPUEncodingRef.current) {
frame = convertGPUFrameToCPUFrame(videoRef, frame);
frame = await convertGPUFrameToCPUFrame(videoRef, frame);
}
// console.log("FRAME", { domHighRes, metadata, timestamp, frameCounter, currentTime: videoRef.currentTime, out: frame.timestamp, outBefore: beforeTimeStamp });

let newTimestamp = frame.timestamp;
/* if (newTimestamp === lastFrameTimestamp) {
frame.close();
return;
} */

if (newTimestamp !== lastFrameTimestamp) {
lastFrameTimestamp = newTimestamp;
for (const videoEncoder of videoEncoders.current) {
const encoder = videoEncoder.encoder();
if (encoder.state !== "closed") {
if (
videoEncoder.video &&
(videoEncoder.video.height !==
videoRef.videoHeight ||
videoEncoder.video.width !==
videoRef.videoWidth)
) {
                                // Reinitialize a new stream, since the aspect ratio has changed
let limitedQualities = quality.filter(
(x) =>
x.video.height <= videoRef.videoHeight
);
const encodeFrameFn = async () => {
/* console.log("Render frame: ", {
timestamp,
frameTimestamp: frame.timestamp,
observedMediaTime,
firstFrameHighresTimestamp,
}); */

// console.log("FRAME", { domHighRes, metadata, timestamp, frameCounter, currentTime: videoRef.currentTime, out: frame.timestamp, outBefore: beforeTimeStamp });

let newTimestamp = frame.timestamp;
/* if (newTimestamp === lastFrameTimestamp) {
frame.close();
return;
} */

if (newTimestamp !== lastFrameTimestamp) {
lastFrameTimestamp = newTimestamp;
for (const videoEncoder of videoEncoders.current) {
const encoder = videoEncoder.encoder();
if (encoder.state !== "closed") {
if (
limitedQualities.length !== quality.length
videoEncoder.video &&
(videoEncoder.video.height !==
videoRef.videoHeight ||
videoEncoder.video.width !==
videoRef.videoWidth)
) {
frame.close();
await updateStream({
streamType: sourceTypeRef.current,
quality: limitedQualities,
});
return;
} else {
await videoEncoder.open();
                                    // Reinitialize a new stream, since the aspect ratio has changed
let limitedQualities = quality.filter(
(x) =>
x.video.height <=
videoRef.videoHeight
);
if (
limitedQualities.length !==
quality.length
) {
frame.close();
await updateStream({
streamType: sourceTypeRef.current,
quality: limitedQualities,
});
return;
} else {
await videoEncoder.open();
}
// console.log('resolution change reopen!', videoEncoder.video.height, videoRef.videoHeight)
}
// console.log('resolution change reopen!', videoEncoder.video.height, videoRef.videoHeight)
}

videoEncoder.video = {
height: videoRef.videoHeight,
width: videoRef.videoWidth,
};

if (encoder.state === "unconfigured") {
let scaler =
videoEncoder.setting.video.height /
videoRef.videoHeight;
// console.log('set bitrate', videoEncoder.setting.video.bitrate)
encoder.configure({
codec: "vp09.00.51.08.01.01.01.01.00" /* "vp09.00.10.08" */ /* isSafari
videoEncoder.video = {
height: videoRef.videoHeight,
width: videoRef.videoWidth,
};

if (encoder.state === "unconfigured") {
let scaler =
videoEncoder.setting.video.height /
videoRef.videoHeight;
// console.log('set bitrate', videoEncoder.setting.video.bitrate)
encoder.configure({
codec: "vp09.00.51.08.01.01.01.01.00" /* "vp09.00.10.08" */ /* isSafari
? "avc1.428020"
: "av01.0.04M.10" */ /* "vp09.00.10.08", */ /* "avc1.428020" ,*/, //"av01.0.04M.10", // "av01.0.08M.10",//"av01.2.15M.10.0.100.09.16.09.0" //
height: videoEncoder.setting.video.height,
width: videoRef.videoWidth * scaler,
bitrate: videoEncoder.setting.video.bitrate,
latencyMode: "realtime",
bitrateMode: "variable",
});
}

if (encoder.state === "configured") {
if (encoder.encodeQueueSize > 30) {
// Too many frames in flight, encoder is overwhelmed
// let's drop this frame.
encoder.flush();

// TODO in non streaming mode, slow down the playback
} else {
// console.log({ frameCounter, playedFrame: metadata?.presentedFrames, droppedFrames: (metadata?.presentedFrames ?? 0) - frameCounter })
frameCounter++;
const insertKeyframe =
Math.round(
frameCounter /
videoEncoders.current.length
) %
60 ===
0;

//let t1 = +new Date;
// console.log("PUT CHUNK", encoder.encodeQueueSize, (t1 - t0));
// t0 = t1;
encoder.encode(frame, {
keyFrame: insertKeyframe,
height: videoEncoder.setting.video
.height,
width: videoRef.videoWidth * scaler,
bitrate:
videoEncoder.setting.video.bitrate,
latencyMode: "realtime",
bitrateMode: "variable",
});
}

if (encoder.state === "configured") {
if (
encoder.encodeQueueSize >
maxEncoderQueueSize + 1
) {
// Too many frames in flight, encoder is overwhelmed
// let's drop this frame.
encoder.flush();

// TODO in non streaming mode, slow down the playback
} else {
const droppedFrames = Math.max(
(metadata?.presentedFrames ?? 0) -
frameCounter,
0
);
/* console.log({ droppedFrames })
if (metadata?.presentedFrames && metadata?.presentedFrames > 10 && droppedFrames / metadata.presentedFrames > 0.1) {
videoRef.playbackRate = 0.3;
} */
console.log({
frameCounter,
playedFrame:
metadata?.presentedFrames,
droppedFrames,
});
frameCounter++;
const insertKeyframe =
Math.round(
frameCounter /
videoEncoders.current.length
) %
60 ===
0;

//let t1 = +new Date;
// console.log("PUT CHUNK", encoder.encodeQueueSize, (t1 - t0));
// t0 = t1;
encoder.encode(frame, {
keyFrame: insertKeyframe,
});
}
}
}
}
}
frame.close();
};

frameEncodingQueue.current.add(encodeFrameFn);

if (frameEncodingQueue.current.size > maxEncoderQueueSize) {
videoRef.pause();
setWaitingForEncoder(true);
frameEncodingQueue.current.onIdle().then(() => {
videoRef.play();
setWaitingForEncoder(false);
});
}
frame.close();
} catch (error) {
console.error("err?", error);
throw error;
Expand Down Expand Up @@ -1195,6 +1236,12 @@ export const Renderer = (args: { stream: MediaStreamDB }) => {
: ""
}
></video>
{waitingForEncoder && (
<div className="center-middle">
Waiting for encoder:{" "}
{frameEncodingQueue.current.size}
</div>
)}
{sourceType == null ? (
<div className="mt-4 mb-4">
<FirstMenuSelect
Expand Down Expand Up @@ -1245,6 +1292,7 @@ export const Renderer = (args: { stream: MediaStreamDB }) => {
</div>
</div>
</div>

{sourceType != null && (
<Tracks
mediaStreams={args.stream}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,36 +1,51 @@
// Create a persistent offscreen canvas and its context
const offscreen = new OffscreenCanvas(1, 1);
const ctx = offscreen.getContext("2d");
import pDefer, { DeferredPromise } from "p-defer";

// Create a persistent worker (adjust the path as needed)
const frameWorker = new Worker(new URL("./frameWorker.js", import.meta.url));

// We'll assign a unique id to each conversion request.
let nextMessageId = 0;
const pendingRequests = new Map<number, DeferredPromise<VideoFrame>>();

// Listen for responses from the worker.
// Resolve or reject the pending conversion request that matches the
// worker's reply. Each request is keyed by the numeric `id` assigned in
// convertGPUFrameToCPUFrame; replies for unknown ids (e.g. after a
// request was already settled) are ignored.
frameWorker.addEventListener("message", (event: MessageEvent) => {
    const { id, cpuFrame, error } = event.data;
    const deferred = pendingRequests.get(id);
    if (!deferred) return;
    if (error) {
        // Worker sends the error as a string; rewrap it so callers get a real Error.
        deferred.reject(new Error(error));
    } else {
        deferred.resolve(cpuFrame);
    }
    // Entry is removed in both outcomes so the map cannot grow unbounded.
    pendingRequests.delete(id);
});

/**
* Convert a GPU-backed frame to a CPU-accessible one using the reusable offscreen canvas.
* The canvas is resized if needed.
* Convert a GPU-backed VideoFrame to a CPU-accessible VideoFrame.
*
* This function sends the provided VideoFrame (source) along with the dimensions
* obtained from the video element (videoRef) to a worker. The worker draws the
* frame on its offscreen canvas and returns a new VideoFrame.
*
* @param videoRef - The HTMLVideoElement (used to obtain dimensions).
* @param source - The GPU-backed VideoFrame to convert.
* @returns A promise that resolves to a CPU-accessible VideoFrame.
*/
export const convertGPUFrameToCPUFrame = (
videoRef: HTMLVideoElement,
source: VideoFrame
) => {
// Resize canvas if dimensions don't match
if (
offscreen.width !== videoRef.videoWidth ||
offscreen.height !== videoRef.videoHeight
) {
offscreen.width = videoRef.videoWidth;
offscreen.height = videoRef.videoHeight;
}

// Draw the source (video element or VideoFrame) onto the offscreen canvas
ctx.drawImage(source, 0, 0, offscreen.width, offscreen.height);

// Get an ImageBitmap from the canvas
const bitmap = offscreen.transferToImageBitmap();
): Promise<VideoFrame> => {
const deferred = pDefer<VideoFrame>();
const messageId = nextMessageId++;
pendingRequests.set(messageId, deferred);

// Create a new VideoFrame from the ImageBitmap
const cpuFrame = new VideoFrame(bitmap, { timestamp: source.timestamp });
// Get the desired dimensions from the video element.
const width = videoRef.videoWidth;
const height = videoRef.videoHeight;

// Clean up the bitmap (it's no longer needed)
bitmap.close();
// Post the message to the worker.
// Transfer the VideoFrame (source) to avoid a copy.
frameWorker.postMessage({ id: messageId, source, width, height }, [source]);

source.close();
return cpuFrame;
return deferred.promise;
};
Loading

0 comments on commit 071bf6d

Please sign in to comment.