[mirotalksfu] - #194 Refactoring

هذا الالتزام موجود في:
Miroslav Pejic
2025-02-19 02:32:10 +01:00
الأصل a0184e1f2c
التزام f9b89fe212
6 ملفات معدلة مع 187 إضافات و137 حذوفات

عرض الملف

@@ -1,166 +1,215 @@
'use strict';
class VirtualBackground {
static instance = null;
constructor() {
if (VirtualBackground.instance) {
return VirtualBackground.instance;
}
VirtualBackground.instance = this;
this.segmentation = null;
this.initialized = false;
this.gifCanvas = null;
this.gifCtx = null;
this.pendingFrames = [];
this.activeProcessor = null;
this.activeGenerator = null;
this.isProcessing = false;
this.gifAnimation = null;
this.currentGifFrame = null;
this.gifCanvas = null;
this.gifContext = null;
}
async initializeSegmentation() {
if (!this.segmentation) {
if (this.initialized) return;
try {
this.segmentation = new SelfieSegmentation({
locateFile: (file) => `https://cdn.jsdelivr.net/npm/@mediapipe/selfie_segmentation/${file}`,
});
this.segmentation.setOptions({ modelSelection: 1 });
}
if (!this.initialized) {
try {
await this.segmentation.initialize();
this.initialized = true;
console.log('✅ Segmentation initialized successfully.');
} catch (error) {
console.error('❌ Error initializing segmentation:', error);
}
this.segmentation.setOptions({ modelSelection: 1 });
this.segmentation.onResults(this.handleSegmentationResults.bind(this));
await this.segmentation.initialize();
this.initialized = true;
console.log('✅ Segmentation initialized successfully.');
} catch (error) {
console.error('❌ Error initializing segmentation:', error);
throw error;
}
}
handleSegmentationResults(results) {
const pending = this.pendingFrames.shift();
if (!pending || !results?.segmentationMask) return;
const { videoFrame, controller, imageBitmap, maskHandler } = pending;
try {
const canvas = new OffscreenCanvas(videoFrame.displayWidth, videoFrame.displayHeight);
const ctx = canvas.getContext('2d');
// Apply original frame
ctx.drawImage(imageBitmap, 0, 0, canvas.width, canvas.height);
// Apply mask processing
maskHandler(ctx, canvas, results.segmentationMask, imageBitmap);
// Create new video frame
const processedFrame = new VideoFrame(canvas, {
timestamp: videoFrame.timestamp,
alpha: 'keep',
});
controller.enqueue(processedFrame);
} catch (error) {
console.error('❌ Frame processing error:', error);
} finally {
// Close frames after processing
if (videoFrame) videoFrame.close();
if (imageBitmap) imageBitmap.close();
}
}
async processStreamWithSegmentation(videoTrack, maskHandler) {
// Stop any existing processor
await this.stopCurrentProcessor();
// Initialize segmentation if not already done
await this.initializeSegmentation();
// Create new processor and generator
const processor = new MediaStreamTrackProcessor({ track: videoTrack });
const generator = new MediaStreamTrackGenerator({ kind: 'video' });
await this.initializeSegmentation(); // Ensure segmentation is initialized
if (!this.segmentation) {
console.error('❌ Segmentation is still undefined after initialization!');
return new MediaStream([videoTrack]); // Fallback to original video stream
}
const processSegmentation = async (videoFrame, controller) => {
try {
const imageBitmap = await createImageBitmap(videoFrame);
// Process segmentation
this.segmentation.onResults(async (results) => {
if (!results || !results.segmentationMask) {
console.warn('⚠️ Segmentation mask is missing, skipping frame.');
videoFrame.close();
imageBitmap.close();
return;
}
const mask = results.segmentationMask;
const canvas = new OffscreenCanvas(videoFrame.displayWidth, videoFrame.displayHeight);
const ctx = canvas.getContext('2d');
// Draw the original video frame
ctx.drawImage(imageBitmap, 0, 0, canvas.width, canvas.height);
// Call the mask handler (either for blur or background)
await maskHandler(ctx, canvas, mask, imageBitmap);
// Convert back to VideoFrame
const newFrame = new VideoFrame(canvas, { timestamp: videoFrame.timestamp });
controller.enqueue(newFrame);
// Close VideoFrame & imageBitmap after processing
videoFrame.close();
imageBitmap.close();
});
// Send frame to MediaPipe
await this.segmentation.send({ image: imageBitmap }).catch((err) => {
console.error('❌ Segmentation processing failed:', err);
return null;
});
} catch (error) {
console.error('❌ Error in processSegmentation:', error);
videoFrame.close();
}
};
const transformer = new TransformStream({
async transform(videoFrame, controller) {
await processSegmentation(videoFrame, controller);
transform: async (videoFrame, controller) => {
try {
const imageBitmap = await createImageBitmap(videoFrame);
this.pendingFrames.push({
videoFrame,
controller,
imageBitmap,
maskHandler,
});
await this.segmentation.send({ image: imageBitmap });
} catch (error) {
console.error('❌ Frame transformation error:', error);
videoFrame.close();
return; // Ensure the frame is closed and processing stops
}
},
flush: () => {
// Cleanup any remaining resources
this.pendingFrames.forEach((frame) => {
if (frame.videoFrame) frame.videoFrame.close();
if (frame.imageBitmap) frame.imageBitmap.close();
});
this.pendingFrames = [];
},
});
processor.readable.pipeThrough(transformer).pipeTo(generator.writable);
// Store active streams
this.activeProcessor = processor;
this.activeGenerator = generator;
this.isProcessing = true;
// Setup error handling
const cleanup = () => {
this.stopCurrentProcessor().catch(() => {});
};
// Start processing pipeline
processor.readable
.pipeThrough(transformer)
.pipeTo(generator.writable)
.catch((error) => {
//console.error('❌ Processing pipeline error:', error);
cleanup();
});
return new MediaStream([generator]);
}
async stopCurrentProcessor() {
if (!this.activeProcessor) return;
try {
// Abort the writable stream first
if (this.activeGenerator?.writable) {
await this.activeGenerator.writable.abort('Processing stopped').catch(() => {});
}
// Cancel the readable stream if not locked
if (this.activeProcessor?.readable && !this.activeProcessor.readable.locked) {
await this.activeProcessor.readable.cancel('Processing stopped').catch(() => {});
}
// Cleanup pending frames
this.pendingFrames.forEach((frame) => {
frame.videoFrame?.close();
frame.imageBitmap?.close();
});
this.pendingFrames = [];
console.log('✅ Processor successfully stopped');
} catch (error) {
console.error('❌ Processor shutdown error:', error);
} finally {
this.activeProcessor = null;
this.activeGenerator = null;
this.isProcessing = false;
}
}
async applyBlurToWebRTCStream(videoTrack, blurLevel = 10) {
const maskHandler = async (ctx, canvas, mask, imageBitmap) => {
// Apply the mask to keep the person in focus
ctx.save();
ctx.globalCompositeOperation = 'destination-in';
ctx.drawImage(mask, 0, 0, canvas.width, canvas.height);
ctx.restore();
// Save the focused person area
const personImageBitmap = await createImageBitmap(canvas);
// Draw the original video frame again
ctx.drawImage(imageBitmap, 0, 0, canvas.width, canvas.height);
// Invert the mask to apply the blur to the background
ctx.save();
ctx.globalCompositeOperation = 'destination-out';
ctx.drawImage(mask, 0, 0, canvas.width, canvas.height);
ctx.restore();
// Apply the blur effect to the background
ctx.filter = `blur(${blurLevel}px)`;
ctx.drawImage(canvas, 0, 0, canvas.width, canvas.height);
// Draw the person back on top of the blurred background
ctx.filter = 'none';
ctx.globalCompositeOperation = 'destination-over';
ctx.drawImage(personImageBitmap, 0, 0, canvas.width, canvas.height);
};
return this.processStreamWithSegmentation(videoTrack, maskHandler);
}
async applyVirtualBackgroundToWebRTCStream(videoTrack, image = 'https://i.postimg.cc/t9PJw5P7/forest.jpg') {
const isGif = image.endsWith('.gif') || image.startsWith('data:image/gif');
const backgroundImage = isGif ? await this.loadGifImage(image) : await this.loadImage(image);
const maskHandler = async (ctx, canvas, mask, imageBitmap) => {
try {
// Apply the mask to keep the person in focus
// Step 1: Apply the mask to keep the person in focus
ctx.save();
ctx.globalCompositeOperation = 'destination-in';
ctx.drawImage(mask, 0, 0, canvas.width, canvas.height);
ctx.restore();
// Save the focused person area
const personImageBitmap = await createImageBitmap(canvas);
// Step 2: Draw the background (it will be blurred later)
ctx.save();
ctx.globalCompositeOperation = 'destination-over';
ctx.filter = `blur(${blurLevel}px)`; // Apply blur to the entire canvas
ctx.drawImage(imageBitmap, 0, 0, canvas.width, canvas.height);
ctx.restore();
// If GIF is detected, draw the current animated frame
if (isGif) {
if (this.currentGifFrame) {
ctx.drawImage(this.currentGifFrame, 0, 0, canvas.width, canvas.height);
}
} else {
// Draw the background image
ctx.drawImage(backgroundImage, 0, 0, canvas.width, canvas.height);
}
// Draw the person back on top of the background image
ctx.globalCompositeOperation = 'source-over';
ctx.drawImage(personImageBitmap, 0, 0, canvas.width, canvas.height);
// Step 3: Redraw the person in focus on top of the blurred background
ctx.save();
ctx.globalCompositeOperation = 'destination-over'; // Ensure the person is on top
ctx.filter = 'none'; // Reset filter to remove blur on the person
ctx.drawImage(imageBitmap, 0, 0, canvas.width, canvas.height);
ctx.restore();
} catch (error) {
console.error('❌ Error in maskHandler:', error);
}
};
if (isGif) this.animateGifBackground();
return this.processStreamWithSegmentation(videoTrack, maskHandler);
}
async applyVirtualBackgroundToWebRTCStream(videoTrack, imageUrl) {
const isGif = imageUrl.endsWith('.gif') || imageUrl.startsWith('data:image/gif');
let background;
isGif ? (background = await this.loadGifImage(imageUrl)) : (background = await this.loadImage(imageUrl));
const maskHandler = (ctx, canvas, mask) => {
// Apply person mask
ctx.globalCompositeOperation = 'destination-in';
ctx.drawImage(mask, 0, 0);
// Draw background
ctx.globalCompositeOperation = 'destination-over';
isGif && this.currentGifFrame
? ctx.drawImage(this.currentGifFrame, 0, 0, canvas.width, canvas.height)
: ctx.drawImage(background, 0, 0, canvas.width, canvas.height);
};
return this.processStreamWithSegmentation(videoTrack, maskHandler);
}
@@ -178,11 +227,11 @@ class VirtualBackground {
return new Promise((resolve, reject) => {
try {
this.gifCanvas = document.createElement('canvas');
this.gifCtx = this.gifCanvas.getContext('2d');
this.gifContext = this.gifCanvas.getContext('2d');
gifler(src).get((a) => {
this.gifAnimation = a;
a.animateInCanvas(this.gifCanvas); // Start the animation
gifler(src).get((animation) => {
this.gifAnimation = animation;
animation.animateInCanvas(this.gifCanvas); // Start the animation
console.log('✅ GIF loaded and animation started.');
resolve(this.gifCanvas);
});
@@ -198,7 +247,9 @@ class VirtualBackground {
// Updates the current GIF frame at each animation step.
// Fix: merge residue duplicated the assignment — the old unconditional line
// is dropped in favor of the guarded version (only publish a frame once the
// gifler animation and canvas actually exist).
const updateFrame = () => {
    if (this.gifAnimation && this.gifCanvas) {
        this.currentGifFrame = this.gifCanvas;
    }
    requestAnimationFrame(updateFrame);
};
updateFrame();