Files
memefast/resources/js/modules/editor/partials/canvas/video-export.jsx
2025-06-21 21:55:52 +08:00

597 lines
26 KiB
JavaScript

import { FFmpeg } from '@ffmpeg/ffmpeg';
import { fetchFile, toBlobURL } from '@ffmpeg/util';
import { useCallback, useEffect, useMemo, useState } from 'react';
// Font configuration mapping: for each family, the public-root path of the
// .ttf file to use per weight/style variant. Arial and Bungee ship a single
// face, so all four variants point at the same file.
// Frozen (deep) so this shared module-level constant cannot be mutated.
const FONT_CONFIG = Object.freeze({
  Montserrat: Object.freeze({
    normal: '/fonts/Montserrat/static/Montserrat-Regular.ttf',
    bold: '/fonts/Montserrat/static/Montserrat-Bold.ttf',
    italic: '/fonts/Montserrat/static/Montserrat-Italic.ttf',
    boldItalic: '/fonts/Montserrat/static/Montserrat-BoldItalic.ttf',
  }),
  Arial: Object.freeze({
    normal: '/arial.ttf',
    bold: '/arial.ttf',
    italic: '/arial.ttf',
    boldItalic: '/arial.ttf',
  }),
  Bungee: Object.freeze({
    normal: '/fonts/Bungee/Bungee-Regular.ttf',
    bold: '/fonts/Bungee/Bungee-Regular.ttf',
    italic: '/fonts/Bungee/Bungee-Regular.ttf',
    boldItalic: '/fonts/Bungee/Bungee-Regular.ttf',
  }),
});
// Hook that turns the editor timeline (video / image / text elements) into an
// FFmpeg command and runs it in-browser via ffmpeg.wasm to export an MP4.
// Params: timelineElements (array of typed elements), dimensions ({width, height}
// of the canvas), totalDuration (seconds).
const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
  // Master switch for the verbose debug logging used throughout this hook.
  const [showConsoleLogs] = useState(true);
  // Export UI state exposed to the caller while an export is running.
  const [isExporting, setIsExporting] = useState(false);
  const [exportProgress, setExportProgress] = useState(0); // 0–100
  const [exportStatus, setExportStatus] = useState('');
// Resolve the .ttf path for a font family + weight + style combination.
// Unknown families fall back to Arial; weight 'bold' or 700 counts as bold.
const getFontFilePath = (fontFamily, fontWeight, fontStyle) => {
  const config = FONT_CONFIG[fontFamily || 'Arial'] || FONT_CONFIG.Arial;
  const bold = fontWeight === 'bold' || fontWeight === 700;
  const italic = fontStyle === 'italic';
  let variant;
  if (bold && italic) {
    variant = 'boldItalic';
  } else if (bold) {
    variant = 'bold';
  } else if (italic) {
    variant = 'italic';
  } else {
    variant = 'normal';
  }
  return config[variant];
};
// Debug dump of the raw timeline on every change.
// FIX: gate the log behind showConsoleLogs like every other log in this hook
// (it previously fired unconditionally), and include it in the dep array.
useEffect(() => {
  showConsoleLogs && console.log(JSON.stringify(timelineElements));
}, [timelineElements, showConsoleLogs]);
// Helper function to convert a CSS-style color into FFmpeg's 0x-hex form.
// Accepts #rgb / #rgba / #rrggbb / #rrggbbaa hex strings; named colors (e.g.
// 'white') pass through unchanged; null/undefined/empty falls back to white.
const formatColorForFFmpeg = (color) => {
  // Handle hex colors (e.g., #ffffff or #fff)
  if (color && color.startsWith('#')) {
    let hex = color.slice(1);
    // Expand shorthand hex by doubling each digit (fff -> ffffff).
    // FIX: also expand 4-digit #rgba shorthand (f00a -> ff0000aa); previously
    // it was passed through as an invalid 4-digit value.
    if (hex.length === 3 || hex.length === 4) {
      hex = hex
        .split('')
        .map((char) => char + char)
        .join('');
    }
    return `0x${hex}`;
  }
  // Handle named colors or other formats - fallback to original
  return color || '0xffffff';
};
// Quote a shell argument when it contains whitespace or shell metacharacters;
// inner double quotes are backslash-escaped. Plain arguments pass through.
const escapeShellArg = (arg) => {
  const NEEDS_QUOTING = /[\s\[\]()$`"'\\|&;<>*?~]/;
  if (!NEEDS_QUOTING.test(arg)) {
    return arg;
  }
  const escaped = arg.replace(/"/g, '\\"');
  return `"${escaped}"`;
};
// Escape user text for FFmpeg's drawtext filter. Rules are applied in order:
// backslashes first (each becomes four, surviving the filter-graph parse),
// then quote/metacharacter escaping, then whitespace control chars -> spaces.
const escapeTextForDrawtext = (text) => {
  const rules = [
    [/\\/g, '\\\\\\\\'], // backslash -> 4 backslashes
    [/'/g, "\\'"], // single quote
    [/:/g, '\\:'], // colon (drawtext option separator)
    [/\[/g, '\\['], // square brackets (filter labels)
    [/\]/g, '\\]'],
    [/,/g, '\\,'], // comma (filter-chain separator)
    [/;/g, '\\;'], // semicolon (filter-graph separator)
    [/\|/g, '\\|'], // pipe
    [/\n/g, ' '], // newline -> space
    [/\r/g, ' '], // carriage return -> space
    [/\t/g, ' '], // tab -> space
  ];
  return rules.reduce((out, [pattern, replacement]) => out.replace(pattern, replacement), text);
};
// Build the FFmpeg invocation that composites the timeline onto a black canvas.
//
// Parameters:
//   is_string     - true  -> return a single shell-ready command string;
//                   false -> return an argv array suitable for ffmpeg.exec().
//   useLocalFiles - true  -> reference media as input_video_N.webm /
//                   input_image_N.jpg (the names exportVideo writes into the
//                   wasm FS); false -> reference each element's remote URL.
//
// Pipeline: visual elements are sorted by `layer` and chained bottom-up as
// trim/scale/overlay (videos, images) or drawtext (texts) filters; each video's
// audio is trimmed, delayed to its startTime, and mixed into [audio_final].
const generateFFmpegCommand = useCallback(
  (is_string = true, useLocalFiles = false) => {
    showConsoleLogs && console.log('🎬 STARTING FFmpeg generation');
    showConsoleLogs && console.log(`📐 Canvas size: ${dimensions.width}x${dimensions.height}, Duration: ${totalDuration}s`);
    // Split the timeline into the three element kinds handled below.
    const videos = timelineElements.filter((el) => el.type === 'video');
    const images = timelineElements.filter((el) => el.type === 'image');
    const texts = timelineElements.filter((el) => el.type === 'text');
    showConsoleLogs && console.log('Videos found:', videos.length);
    showConsoleLogs && console.log('Images found:', images.length);
    showConsoleLogs && console.log('Texts found:', texts.length);
    // Check for WebM videos with potential transparency
    const webmVideos = videos.filter((v) => v.source_webm && (v.source_webm.includes('.webm') || v.source_webm.includes('webm')));
    if (webmVideos.length > 0) {
      showConsoleLogs && console.log(`🌟 Found ${webmVideos.length} WebM video(s) - will preserve transparency`);
    }
    // Summary of all elements for debugging
    showConsoleLogs && console.log('📋 Element Summary:');
    videos.forEach((v, i) => showConsoleLogs && console.log(` Video ${i}: Layer ${v.layer} (${v.x},${v.y}) ${v.width}x${v.height}`));
    images.forEach(
      (img, i) => showConsoleLogs && console.log(` Image ${i}: Layer ${img.layer} (${img.x},${img.y}) ${img.width}x${img.height}`),
    );
    texts.forEach((t, i) => showConsoleLogs && console.log(` Text ${i}: Layer ${t.layer} (${t.x},${t.y}) "${t.text.substring(0, 30)}..."`));
    // No visual media at all: emit a 1-second black placeholder clip instead.
    if (videos.length === 0 && images.length === 0) {
      if (is_string) {
        return 'ffmpeg -y -c:v libvpx-vp9 -f lavfi -i color=black:size=450x800:duration=1 -t 1 -vcodec libx264 output.mp4';
      } else {
        return [
          '-y',
          '-c:v',
          'libvpx-vp9',
          '-f',
          'lavfi',
          '-i',
          'color=black:size=450x800:duration=1',
          '-t',
          '1',
          '-vcodec',
          'libx264',
          'output.mp4',
        ];
      }
    }
    let inputArgs = [];
    let inputIndex = 0; // NOTE(review): incremented but never read.
    // Add video inputs
    videos.forEach((v, i) => {
      inputArgs.push('-i');
      inputArgs.push(useLocalFiles ? `input_video_${i}.webm` : v.source_webm);
      inputIndex++;
    });
    // Add image inputs with loop and duration
    images.forEach((img, i) => {
      inputArgs.push('-loop', '1', '-t', img.duration.toString(), '-i');
      inputArgs.push(useLocalFiles ? `input_image_${i}.jpg` : img.source);
      inputIndex++;
    });
    let filters = [];
    // Canvas-sized black base layer that every element is composited onto.
    filters.push(`color=black:size=${dimensions.width}x${dimensions.height}:duration=${totalDuration}[base]`);
    let videoLayer = 'base';
    // Sort all visual elements by layer, then process in layer order
    const allVisualElements = [
      ...videos.map((v, i) => ({ ...v, elementType: 'video', originalIndex: i })),
      ...images.map((img, i) => ({ ...img, elementType: 'image', originalIndex: i })),
      ...texts.map((t, i) => ({ ...t, elementType: 'text', originalIndex: i })),
    ].sort((a, b) => (a.layer || 0) - (b.layer || 0)); // Sort by layer (lowest first)
    showConsoleLogs &&
      console.log(
        '🎭 Processing order by layer:',
        allVisualElements.map((el) => `${el.elementType}${el.originalIndex}(L${el.layer || 0})`).join(' → '),
      );
    // Track input indices for videos and images
    let videoInputIndex = 0;
    let imageInputIndex = videos.length; // Images start after videos
    // Process elements in layer order
    allVisualElements.forEach((element, processingIndex) => {
      if (element.elementType === 'video') {
        const v = element;
        const i = element.originalIndex;
        showConsoleLogs &&
          console.log(
            `🎬 Video ${i} (Layer ${v.layer || 0}) - Position: (${v.x}, ${v.y}), Size: ${v.width}x${v.height}, Time: ${v.startTime}-${v.startTime + v.duration}`,
          );
        // Check if video extends outside canvas
        if (v.x < 0 || v.y < 0 || v.x + v.width > dimensions.width || v.y + v.height > dimensions.height) {
          console.warn(`⚠️ Video ${i} extends outside canvas boundaries`);
        }
        // Check if this is a WebM video (likely has transparency)
        const isWebM = v.source_webm && (v.source_webm.includes('.webm') || v.source_webm.includes('webm'));
        // Trim to the clip's in-point/duration and reset timestamps to zero.
        filters.push(`[${videoInputIndex}:v]trim=start=${v.inPoint}:duration=${v.duration},setpts=PTS-STARTPTS[v${i}_trim]`);
        // For WebM videos, preserve alpha channel during scaling
        if (isWebM) {
          showConsoleLogs && console.log(`🌟 Video ${i} is WebM - preserving alpha channel`);
          filters.push(`[v${i}_trim]scale=${Math.round(v.width)}:${Math.round(v.height)}:flags=bicubic[v${i}_scale]`);
        } else {
          filters.push(`[v${i}_trim]scale=${Math.round(v.width)}:${Math.round(v.height)}[v${i}_scale]`);
        }
        // For overlay, ensure alpha blending is enabled for WebM
        if (isWebM) {
          filters.push(
            `[${videoLayer}][v${i}_scale]overlay=${Math.round(v.x)}:${Math.round(v.y)}:enable='between(t,${v.startTime},${
              v.startTime + v.duration
            })':format=auto:eof_action=pass[v${i}_out]`,
          );
        } else {
          filters.push(
            `[${videoLayer}][v${i}_scale]overlay=${Math.round(v.x)}:${Math.round(v.y)}:enable='between(t,${v.startTime},${
              v.startTime + v.duration
            })'[v${i}_out]`,
          );
        }
        videoLayer = `v${i}_out`;
        videoInputIndex++;
      } else if (element.elementType === 'image') {
        const img = element;
        const i = element.originalIndex;
        showConsoleLogs &&
          console.log(
            `🖼️ Image ${i} (Layer ${img.layer || 0}) - Position: (${img.x}, ${img.y}), Size: ${img.width}x${img.height}, Time: ${img.startTime}-${img.startTime + img.duration}`,
          );
        // Check if image is larger than canvas or positioned outside
        if (img.width > dimensions.width || img.height > dimensions.height) {
          console.warn(`⚠️ Image ${i} (${img.width}x${img.height}) is larger than canvas (${dimensions.width}x${dimensions.height})`);
        }
        if (img.x < 0 || img.y < 0 || img.x + img.width > dimensions.width || img.y + img.height > dimensions.height) {
          console.warn(`⚠️ Image ${i} extends outside canvas boundaries`);
        }
        filters.push(`[${imageInputIndex}:v]scale=${Math.round(img.width)}:${Math.round(img.height)}[img${i}_scale]`);
        filters.push(
          `[${videoLayer}][img${i}_scale]overlay=${Math.round(img.x)}:${Math.round(img.y)}:enable='between(t,${img.startTime},${
            img.startTime + img.duration
          })'[img${i}_out]`,
        );
        videoLayer = `img${i}_out`;
        imageInputIndex++;
      } else if (element.elementType === 'text') {
        const t = element;
        const i = element.originalIndex;
        showConsoleLogs &&
          console.log(`📝 Text ${i} (Layer ${t.layer || 0}) - Position: (${t.x}, ${t.y}) Text: "${t.text.substring(0, 30)}..."`);
        // Better text escaping for FFmpeg
        const escapedText = escapeTextForDrawtext(t.text);
        // Get the appropriate font file path
        const fontFilePath = getFontFilePath(t.fontFamily, t.fontWeight, t.fontStyle);
        const fontFileName = fontFilePath.split('/').pop();
        // Center the text: x position is the center point, y is adjusted for baseline
        const centerY = Math.round(t.y + t.fontSize * 0.3); // Adjust for text baseline
        // Format colors for FFmpeg
        const fontColor = formatColorForFFmpeg(t.fill);
        const borderColor = formatColorForFFmpeg(t.stroke);
        const borderWidth = Math.max(0, t.strokeWidth || 0); // Ensure non-negative
        // Build drawtext filter with proper border handling
        // FIXED: Wrap enable parameter without quotes to avoid truncation
        // NOTE(review): x=(w-tw)/2 centers horizontally on the canvas, so t.x is
        // ignored for horizontal placement - presumably intentional; confirm.
        let drawTextFilter = `[${videoLayer}]drawtext=fontfile=/${fontFileName}:text='${escapedText}':x=(w-tw)/2:y=${centerY}:fontsize=${t.fontSize}:fontcolor=${fontColor}`;
        // Only add border if strokeWidth > 0
        if (borderWidth > 0) {
          drawTextFilter += `:borderw=${borderWidth}:bordercolor=${borderColor}`;
        }
        // FIXED: Don't wrap enable parameter in quotes - this was causing the truncation
        drawTextFilter += `:enable=between(t\\,${t.startTime}\\,${t.startTime + t.duration})[t${i}]`;
        showConsoleLogs && console.log(`Text filter ${i}:`, drawTextFilter);
        filters.push(drawTextFilter);
        videoLayer = `t${i}`;
      }
    });
    showConsoleLogs && console.log('🎵 PROCESSING AUDIO FOR', videos.length, 'VIDEOS');
    // Trim each video's audio to the clip window and delay it to its start time.
    let audioOutputs = [];
    videos.forEach((v, i) => {
      const delay = Math.round(v.startTime * 1000); // adelay takes milliseconds, one value per channel
      showConsoleLogs && console.log(`🎵 Audio ${i}: delay=${delay}ms, inPoint=${v.inPoint}, duration=${v.duration}`);
      filters.push(`[${i}:a]atrim=start=${v.inPoint}:duration=${v.duration},asetpts=PTS-STARTPTS,adelay=${delay}|${delay}[a${i}]`);
      audioOutputs.push(`[a${i}]`);
    });
    // Single track: pad to full duration. Multiple tracks: mix them together.
    let audioArgs = [];
    if (audioOutputs.length === 1) {
      filters.push(`[a0]apad=pad_dur=${totalDuration}[audio_final]`);
      audioArgs = ['-map', '[audio_final]', '-c:a', 'aac'];
    } else if (audioOutputs.length > 1) {
      filters.push(`${audioOutputs.join('')}amix=inputs=${audioOutputs.length}:duration=longest[audio_final]`);
      audioArgs = ['-map', '[audio_final]', '-c:a', 'aac'];
    }
    showConsoleLogs && console.log('🎵 Audio args:', audioArgs);
    const filterComplex = filters.join('; ');
    showConsoleLogs && console.log('🎵 Filter includes atrim:', filterComplex.includes('atrim'));
    showConsoleLogs && console.log('📝 Complete filter complex:', filterComplex);
    showConsoleLogs &&
      console.log(
        `🎭 Final layer order:`,
        allVisualElements.map((el) => `${el.elementType}${el.originalIndex}(L${el.layer || 0})`).join(' → '),
      );
    // NOTE(review): '-c:v libvpx-vp9' precedes the inputs, i.e. it acts as an
    // input (decoder) option - presumably to decode WebM alpha; confirm intent.
    // NOTE(review): audioArgs already contains '-c:a aac' when audio exists, so
    // it appears twice below (FFmpeg takes the last occurrence; redundant but harmless).
    const finalArgs = [
      '-y',
      '-c:v',
      'libvpx-vp9',
      ...inputArgs,
      '-filter_complex',
      filterComplex,
      '-map',
      `[${videoLayer}]`,
      ...audioArgs,
      '-c:a',
      'aac',
      '-r',
      '30',
      '-t',
      totalDuration.toString(),
      '-vcodec',
      'libx264',
      'output.mp4',
    ];
    if (is_string) {
      // Shell-string form: rebuild the input list with shell escaping applied.
      let inputStrings = [];
      videos.forEach((v, i) => {
        inputStrings.push(`-i ${escapeShellArg(useLocalFiles ? `input_video_${i}.webm` : v.source_webm)}`);
      });
      images.forEach((img, i) => {
        inputStrings.push(`-loop 1 -t ${img.duration} -i ${escapeShellArg(useLocalFiles ? `input_image_${i}.jpg` : img.source)}`);
      });
      const inputs = inputStrings.join(' ');
      const audioMap = audioArgs.length > 0 ? ` ${audioArgs.map((arg) => escapeShellArg(arg)).join(' ')}` : '';
      const command = `ffmpeg -y -c:v libvpx-vp9 ${inputs} -filter_complex ${escapeShellArg(filterComplex)} -map ${escapeShellArg(`[${videoLayer}]`)}${audioMap} -c:a aac -r 30 -t ${totalDuration} -vcodec libx264 output.mp4`;
      showConsoleLogs && console.log('🎵 FINAL COMMAND HAS AUDIO:', command.includes('atrim') && command.includes('audio_final'));
      return command;
    } else {
      showConsoleLogs &&
        console.log(
          '🎵 FINAL ARGS HAVE AUDIO:',
          finalArgs.some((arg) => typeof arg === 'string' && arg.includes('atrim')) &&
            finalArgs.some((arg) => typeof arg === 'string' && arg.includes('audio_final')),
        );
      return finalArgs;
    }
  },
  [timelineElements, dimensions, totalDuration, showConsoleLogs],
);
// Memoized shell-string form of the export command (remote media sources).
const ffmpegCommand = useMemo(() => generateFFmpegCommand(true, false), [generateFFmpegCommand]);
// Copy the generated FFmpeg shell command to the clipboard (debug helper).
// FIX: navigator.clipboard.writeText returns a promise that was previously
// left floating - a rejection (permission denied, insecure context) escaped
// as an unhandled rejection. Log it instead.
const copyFFmpegCommand = useCallback(() => {
  showConsoleLogs && console.log('🎬 FFMPEG COMMAND GENERATED:');
  showConsoleLogs && console.log('Command:', ffmpegCommand);
  navigator.clipboard.writeText(ffmpegCommand).catch((error) => {
    console.error('Failed to copy FFmpeg command to clipboard:', error);
  });
}, [ffmpegCommand, showConsoleLogs]);
// Run the full export pipeline in-browser with ffmpeg.wasm:
// load core → write fonts into the wasm FS → download media → exec the
// generated command → trigger a browser download of output.mp4.
// Progress bands: 0-10 load, 10-20 fonts, 20-60 media, 70-90 encode, 90-100 download.
const exportVideo = async () => {
  setIsExporting(true);
  setExportProgress(0);
  setExportStatus('Starting export...');
  // Declared outside try so the finally block can always release the worker.
  let ffmpeg = null;
  try {
    setExportStatus('Loading FFmpeg...');
    ffmpeg = new FFmpeg();
    ffmpeg.on('progress', ({ progress }) => {
      setExportProgress(Math.round(progress * 100));
    });
    ffmpeg.on('log', ({ message }) => {
      showConsoleLogs && console.log('FFmpeg Log:', message);
    });
    //const baseURL = 'https://unpkg.com/@ffmpeg/core@0.12.10/dist/esm';
    const baseURL = window.location.origin + '/ffmpeg_packages/core/dist/esm';
    const coreURL = `${baseURL}/ffmpeg-core.js`;
    const wasmURL = `${baseURL}/ffmpeg-core.wasm`;
    showConsoleLogs && console.log('Converting JS coreURL...');
    const coreBlobURL = await toBlobURL(coreURL, 'text/javascript');
    showConsoleLogs && console.log('JS coreURL ready:', coreBlobURL);
    showConsoleLogs && console.log('Converting WASM URL...');
    const wasmBlobURL = await toBlobURL(wasmURL, 'application/wasm');
    showConsoleLogs && console.log('WASM URL ready:', wasmBlobURL);
    showConsoleLogs && console.log('Calling ffmpeg.load...');
    await ffmpeg.load({
      coreURL: coreBlobURL,
      wasmURL: wasmBlobURL,
    });
    showConsoleLogs && console.log('FFmpeg loaded!');
    setExportProgress(10);
    setExportStatus('Loading fonts...');
    // Collect all fonts that need to be loaded with their correct paths
    const fontsToLoad = new Map(); // Map from filename to full path
    // Add Arial font (fallback)
    fontsToLoad.set('arial.ttf', 'https://raw.githubusercontent.com/ffmpegwasm/testdata/master/arial.ttf');
    // Add fonts used by text elements
    timelineElements
      .filter((el) => el.type === 'text')
      .forEach((text) => {
        const fontFilePath = getFontFilePath(text.fontFamily, text.fontWeight, text.fontStyle);
        const fontFileName = fontFilePath.split('/').pop();
        // Only add if not already in map and not arial.ttf
        if (fontFileName !== 'arial.ttf' && !fontsToLoad.has(fontFileName)) {
          fontsToLoad.set(fontFileName, fontFilePath);
        }
      });
    showConsoleLogs && console.log('Fonts to load:', Array.from(fontsToLoad.entries()));
    // Load each unique font
    let fontProgress = 0;
    for (const [fontFileName, fontPath] of fontsToLoad) {
      try {
        showConsoleLogs && console.log(`Loading font: ${fontFileName} from ${fontPath}`);
        await ffmpeg.writeFile(fontFileName, await fetchFile(fontPath));
        showConsoleLogs && console.log(`✓ Font ${fontFileName} loaded successfully`);
        fontProgress++;
        setExportProgress(10 + Math.round((fontProgress / fontsToLoad.size) * 10));
      } catch (error) {
        // Deliberate best-effort: a missing font is logged, not fatal.
        console.error(`❌ Failed to load font ${fontFileName} from ${fontPath}:`, error);
        // If font loading fails, we'll use arial.ttf as fallback
      }
    }
    showConsoleLogs && console.log('All fonts loaded!');
    setExportProgress(20);
    setExportStatus('Downloading media...');
    const videos = timelineElements.filter((el) => el.type === 'video');
    const images = timelineElements.filter((el) => el.type === 'image');
    const totalMedia = videos.length + images.length;
    showConsoleLogs && console.log(`Total media to download: ${totalMedia} (${videos.length} videos, ${images.length} images)`);
    let mediaProgress = 0;
    // Download videos
    for (let i = 0; i < videos.length; i++) {
      try {
        showConsoleLogs && console.log(`Downloading video ${i}: ${videos[i].source_webm}`);
        await ffmpeg.writeFile(`input_video_${i}.webm`, await fetchFile(videos[i].source_webm));
        showConsoleLogs && console.log(`✓ Video ${i} downloaded`);
      } catch (error) {
        console.error(`❌ Failed to download video ${i}:`, error);
        throw new Error(`Failed to download video ${i}: ${error.message}`);
      }
      mediaProgress++;
      setExportProgress(20 + Math.round((mediaProgress / totalMedia) * 40));
    }
    // Download images
    for (let i = 0; i < images.length; i++) {
      try {
        showConsoleLogs && console.log(`Downloading image ${i}: ${images[i].source}`);
        await ffmpeg.writeFile(`input_image_${i}.jpg`, await fetchFile(images[i].source));
        showConsoleLogs && console.log(`✓ Image ${i} downloaded`);
      } catch (error) {
        console.error(`❌ Failed to download image ${i}:`, error);
        throw new Error(`Failed to download image ${i}: ${error.message}`);
      }
      mediaProgress++;
      setExportProgress(20 + Math.round((mediaProgress / totalMedia) * 40));
    }
    showConsoleLogs && console.log('All media downloaded successfully!');
    // List all files in FFmpeg filesystem for debugging
    try {
      const files = await ffmpeg.listDir('/');
      showConsoleLogs && console.log('Files in FFmpeg filesystem:', files);
    } catch (listError) {
      console.warn('Could not list FFmpeg filesystem:', listError);
    }
    setExportStatus('Processing video...');
    // Argv form, local (wasm FS) filenames.
    let args = generateFFmpegCommand(false, true);
    showConsoleLogs && console.log('Generated FFmpeg arguments:', args);
    showConsoleLogs && console.log(generateFFmpegCommand(true, true));
    setExportProgress(70);
    try {
      await ffmpeg.exec(args);
      showConsoleLogs && console.log('FFmpeg execution completed successfully!');
    } catch (execError) {
      console.error('FFmpeg execution failed:', execError);
      console.error('Failed arguments:', args);
      // Log the specific error details
      if (execError.message) {
        console.error('Error message:', execError.message);
      }
      throw new Error(`FFmpeg execution failed: ${execError.message || 'Unknown error'}`);
    }
    setExportStatus('Downloading...');
    setExportProgress(90);
    // Read the result out of the wasm FS and hand it to the browser as a download.
    const fileData = await ffmpeg.readFile('output.mp4');
    const data = new Uint8Array(fileData);
    const blob = new Blob([data.buffer], { type: 'video/mp4' });
    const url = URL.createObjectURL(blob);
    const link = document.createElement('a');
    link.href = url;
    link.download = 'exported_video.mp4';
    link.click();
    URL.revokeObjectURL(url);
    setExportProgress(100);
    setExportStatus('Complete!');
  } catch (error) {
    console.error('Full export error details:', {
      message: error.message,
      stack: error.stack,
      name: error.name,
      code: error.code,
      errno: error.errno,
      path: error.path,
      error: error,
    });
    setExportStatus(`Failed: ${error.message}`);
  } finally {
    // FIX: terminate on every path - previously terminate() only ran on the
    // success path, leaking the wasm worker whenever an error was thrown.
    try {
      ffmpeg?.terminate();
    } catch (terminateError) {
      console.warn('Could not terminate FFmpeg instance:', terminateError);
    }
    // Reset the UI state a few seconds after completion/failure.
    setTimeout(() => {
      setIsExporting(false);
      setExportStatus('');
      setExportProgress(0);
    }, 3000);
  }
};
return {
  // State
  isExporting, // true while exportVideo() is running
  exportProgress, // 0-100 value for a progress bar
  exportStatus, // human-readable status / error message, cleared after export
  ffmpegCommand, // memoized shell-string command (remote media sources)
  // Functions
  copyFFmpegCommand, // copy ffmpegCommand to the clipboard
  exportVideo, // run the in-browser export and download output.mp4
  generateFFmpegCommand, // (is_string, useLocalFiles) -> string | argv array
};
};
// Default export so editor canvas components can consume the hook.
export default useVideoExport;