import { FFmpeg } from '@ffmpeg/ffmpeg';
import { fetchFile, toBlobURL } from '@ffmpeg/util';
import Konva from 'konva';
import { useCallback, useEffect, useMemo, useState } from 'react';

// Import centralized font management
import { getFontStyle, loadTimelineFonts, WATERMARK_CONFIG } from '@/modules/editor/fonts';

const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
  const [showConsoleLogs] = useState(true);

  const [isExporting, setIsExporting] = useState(false);
  const [exportProgress, setExportProgress] = useState(0);
  const [exportStatus, setExportStatus] = useState('');

  useEffect(() => {
    // Gate this debug dump behind the same flag as the other logs
    showConsoleLogs && console.log(JSON.stringify(timelineElements));
  }, [timelineElements, showConsoleLogs]);

  // Helper function to convert color format for FFmpeg
  const formatColorForFFmpeg = (color) => {
    // Handle hex colors (e.g., #ffffff or #fff)
    if (color && color.startsWith('#')) {
      // Remove the # and ensure it's 6 characters
      let hex = color.slice(1);
      if (hex.length === 3) {
        // Convert short hex to full hex (e.g., fff -> ffffff)
        hex = hex
          .split('')
          .map((char) => char + char)
          .join('');
      }
      return `0x${hex}`;
    }
    // Handle named colors or other formats - fallback to original
    return color || '0xffffff';
  };
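  // e.g. formatColorForFFmpeg('#fff') -> '0xffffff', formatColorForFFmpeg('#ff0000') -> '0xff0000';
  // named colors such as 'white' fall through unchanged.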

  // Helper function to properly escape shell arguments
  const escapeShellArg = (arg) => {
    // If argument contains spaces, brackets, or other special characters, quote it
    if (/[\s\[\]()$`"'\\|&;<>*?~]/.test(arg)) {
      return `"${arg.replace(/"/g, '\\"')}"`;
    }
    return arg;
  };
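  // e.g. escapeShellArg('text 0.png') -> '"text 0.png"', while escapeShellArg('output.mp4')
  // is returned as-is because it contains no shell metacharacters.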

  // Ensure text element has proper width properties for consistent rendering
  const ensureTextProperties = async (textElement, dimensions) => {
    // If fixedWidth and offsetX are already set, use them
    if (textElement.fixedWidth !== undefined && textElement.offsetX !== undefined) {
      return textElement;
    }

    // Create a temporary stage to measure text dimensions
    const tempStage = new Konva.Stage({
      container: document.createElement('div'),
      width: dimensions.width,
      height: dimensions.height,
    });

    const tempLayer = new Konva.Layer();
    tempStage.add(tempLayer);

    // Create temporary text node to measure
    const tempTextNode = new Konva.Text({
      text: textElement.text,
      fontSize: textElement.fontSize,
      fontStyle: getFontStyle(textElement),
      fontFamily: textElement.fontFamily || 'Montserrat',
      align: 'center',
      verticalAlign: 'middle',
      wrap: 'word',
      // Use a reasonable width for text wrapping (80% of canvas width)
      width: dimensions.width * 0.8,
    });

    tempLayer.add(tempTextNode);
    tempTextNode._setTextData();

    // Get measured dimensions
    const measuredWidth = tempTextNode.width();

    // Calculate offsetX for center alignment
    const offsetX = measuredWidth / 2;

    // Cleanup
    tempStage.destroy();

    // Return element with calculated properties
    return {
      ...textElement,
      fixedWidth: measuredWidth,
      offsetX: offsetX,
    };
  };
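  // e.g. a block measured at 400px wide gets offsetX = 200, so a node placed at x = 225
  // on a 450px-wide canvas renders horizontally centered, matching the preview.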

  // Debug function to compare preview vs export text properties
  const debugTextProperties = (textElement) => {
    if (!showConsoleLogs) return;
    const { text, x, y, fontSize, fontFamily, fontWeight, fontStyle, fixedWidth, offsetX, rotation, fill, stroke, strokeWidth } =
      textElement;
    console.log('🔍 Text Element Properties for Export:', {
      text,
      x,
      y,
      fontSize,
      fontFamily,
      fontWeight,
      fontStyle,
      fixedWidth,
      offsetX,
      rotation,
      fill,
      stroke,
      strokeWidth,
    });
  };

  // Render text element to image using Konva
  const renderTextElementToImage = async (textElement, dimensions) => {
    showConsoleLogs && console.log(`🎨 Rendering text element: "${textElement.text.substring(0, 30)}..."`);

    // Ensure text element has proper width properties
    const processedTextElement = await ensureTextProperties(textElement, dimensions);

    // Debug text properties
    debugTextProperties(processedTextElement);

    // Create offscreen stage with canvas dimensions
    const stage = new Konva.Stage({
      container: document.createElement('div'),
      width: dimensions.width,
      height: dimensions.height,
    });

    const layer = new Konva.Layer();
    stage.add(layer);

    // Wait a bit for fonts to be ready (same as preview)
    await new Promise((resolve) => setTimeout(resolve, 50));

    // Create text node with EXACT same properties as preview
    const textNode = new Konva.Text({
      text: processedTextElement.text,
      x: processedTextElement.x,
      y: processedTextElement.y,
      fontSize: processedTextElement.fontSize,
      fontStyle: getFontStyle(processedTextElement),
      fontFamily: processedTextElement.fontFamily || 'Montserrat',
      fill: processedTextElement.fill || '#ffffff',
      stroke: processedTextElement.strokeWidth > 0 ? processedTextElement.stroke || '#000000' : undefined,
      strokeWidth: processedTextElement.strokeWidth * 3 || 0,
      fillAfterStrokeEnabled: true,
      strokeScaleEnabled: false,
      rotation: processedTextElement.rotation || 0,
      // EXACT same alignment as preview
      align: 'center',
      verticalAlign: 'middle',
      wrap: 'word',
      // EXACT same scaling as preview
      scaleX: 1,
      scaleY: 1,
      // EXACT same width/offset as preview
      width: processedTextElement.fixedWidth,
      offsetX: processedTextElement.offsetX,
    });

    layer.add(textNode);

    // Force text measurement like in preview
    textNode._setTextData();
    layer.draw();

    // Wait for rendering to complete
    await new Promise((resolve) => setTimeout(resolve, 100));

    // Log text dimensions for debugging
    showConsoleLogs &&
      console.log(
        `📏 Export text dimensions: width=${textNode.width()}, height=${textNode.height()}, textWidth=${textNode.textWidth}, textHeight=${textNode.textHeight}`,
      );
    showConsoleLogs &&
      console.log(`📏 Element properties: fixedWidth=${processedTextElement.fixedWidth}, offsetX=${processedTextElement.offsetX}`);

    // Convert to image with same resolution as canvas
    const dataURL = stage.toDataURL({
      mimeType: 'image/png',
      quality: 1.0,
      pixelRatio: 1, // FIXED: Use 1:1 ratio to match canvas resolution
    });

    // Cleanup
    stage.destroy();

    showConsoleLogs && console.log(`✅ Text element rendered to image`);
    return dataURL;
  };

  // Render watermark to image using Konva
  const renderWatermarkToImage = async (dimensions) => {
    showConsoleLogs && console.log(`🏷️ Rendering watermark`);

    const stage = new Konva.Stage({
      container: document.createElement('div'),
      width: dimensions.width,
      height: dimensions.height,
    });

    const layer = new Konva.Layer();
    stage.add(layer);

    const watermarkText = new Konva.Text({
      text: 'MEMEAIGEN.COM',
      x: dimensions.width / 2,
      y: dimensions.height / 2 + dimensions.height * 0.2,
      fontSize: WATERMARK_CONFIG.fontSize,
      fontFamily: WATERMARK_CONFIG.fontFamily,
      fill: WATERMARK_CONFIG.fill,
      stroke: WATERMARK_CONFIG.stroke,
      strokeWidth: WATERMARK_CONFIG.strokeWidth,
      opacity: WATERMARK_CONFIG.opacity,
      align: 'center',
      verticalAlign: 'middle',
      offsetX: 90, // Approximate half-width to center the text
      offsetY: 5, // Approximate half-height to center the text
    });

    layer.add(watermarkText);
    layer.draw();

    const dataURL = stage.toDataURL({
      mimeType: 'image/png',
      quality: 1.0,
      pixelRatio: 1, // FIXED: Match canvas resolution
    });

    stage.destroy();
    showConsoleLogs && console.log(`✅ Watermark rendered to image`);
    return dataURL;
  };

  const generateFFmpegCommand = useCallback(
    (is_string = true, useLocalFiles = false, textImages = {}, watermarkFileName = null) => {
      showConsoleLogs && console.log('🎬 STARTING FFmpeg generation');
      showConsoleLogs && console.log(`📐 Canvas size: ${dimensions.width}x${dimensions.height}, Duration: ${totalDuration}s`);

      const videos = timelineElements.filter((el) => el.type === 'video');
      const images = timelineElements.filter((el) => el.type === 'image');
      const texts = timelineElements.filter((el) => el.type === 'text');

      showConsoleLogs && console.log('Videos found:', videos.length);
      showConsoleLogs && console.log('Images found:', images.length);
      showConsoleLogs && console.log('Texts found:', texts.length);

      // Check for WebM videos with potential transparency
      const webmVideos = videos.filter((v) => v.source_webm && v.source_webm.includes('webm'));
      if (webmVideos.length > 0) {
        showConsoleLogs && console.log(`🌟 Found ${webmVideos.length} WebM video(s) - will preserve transparency`);
      }

      // Summary of all elements for debugging
      showConsoleLogs && console.log('📋 Element Summary:');
      videos.forEach((v, i) => showConsoleLogs && console.log(`  Video ${i}: Layer ${v.layer} (${v.x},${v.y}) ${v.width}x${v.height}`));
      images.forEach(
        (img, i) => showConsoleLogs && console.log(`  Image ${i}: Layer ${img.layer} (${img.x},${img.y}) ${img.width}x${img.height}`),
      );
      texts.forEach((t, i) => showConsoleLogs && console.log(`  Text ${i}: Layer ${t.layer} (${t.x},${t.y}) "${t.text.substring(0, 30)}..."`));

      if (videos.length === 0 && images.length === 0) {
        if (is_string) {
          return 'ffmpeg -y -c:v libvpx-vp9 -f lavfi -i color=black:size=450x800:duration=1 -t 1 -vcodec libx264 output.mp4';
        } else {
          return [
            '-y',
            '-c:v',
            'libvpx-vp9',
            '-f',
            'lavfi',
            '-i',
            'color=black:size=450x800:duration=1',
            '-t',
            '1',
            '-vcodec',
            'libx264',
            'output.mp4',
          ];
        }
      }

      let inputArgs = [];
      let inputIndex = 0;

      // Add video inputs
      videos.forEach((v, i) => {
        inputArgs.push('-i');
        inputArgs.push(useLocalFiles ? `input_video_${i}.webm` : v.source_webm);
        inputIndex++;
      });

      // Add image inputs with loop and duration
      images.forEach((img, i) => {
        inputArgs.push('-loop', '1', '-t', img.duration.toString(), '-i');
        inputArgs.push(useLocalFiles ? `input_image_${i}.jpg` : img.source);
        inputIndex++;
      });

      // Add text image inputs
      texts.forEach((text, i) => {
        inputArgs.push('-loop', '1', '-t', totalDuration.toString(), '-i');
        inputArgs.push(useLocalFiles ? `text_${i}.png` : textImages[text.id]?.fileName || `text_${i}.png`);
        inputIndex++;
      });

      // Add watermark input if exists
      if (watermarkFileName) {
        inputArgs.push('-loop', '1', '-t', totalDuration.toString(), '-i');
        inputArgs.push(watermarkFileName);
        inputIndex++;
      }

      let filters = [];
      filters.push(`color=black:size=${dimensions.width}x${dimensions.height}:duration=${totalDuration}[base]`);

      let videoLayer = 'base';

      // Sort all visual elements by layer, then process in layer order
      const allVisualElements = [
        ...videos.map((v, i) => ({ ...v, elementType: 'video', originalIndex: i })),
        ...images.map((img, i) => ({ ...img, elementType: 'image', originalIndex: i })),
        ...texts.map((t, i) => ({ ...t, elementType: 'text', originalIndex: i })),
      ].sort((a, b) => (a.layer || 0) - (b.layer || 0)); // Sort by layer (lowest first)
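      // e.g. elements with layers [2, 0, 1] are processed as [L0, L1, L2], so higher
      // layers are overlaid later in the filter chain and end up on top.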

      showConsoleLogs &&
        console.log(
          '🎭 Processing order by layer:',
          allVisualElements.map((el) => `${el.elementType}${el.originalIndex}(L${el.layer || 0})`).join(' → '),
        );

      // Track input indices for videos, images, and text images
      let videoInputIndex = 0;
      let imageInputIndex = videos.length; // Images start after videos
      let textInputIndex = videos.length + images.length; // Text images start after regular images

      // Process elements in layer order
      allVisualElements.forEach((element) => {
        if (element.elementType === 'video') {
          const v = element;
          const i = element.originalIndex;

          showConsoleLogs &&
            console.log(
              `🎬 Video ${i} (Layer ${v.layer || 0}) - Position: (${v.x}, ${v.y}), Size: ${v.width}x${v.height}, Time: ${v.startTime}-${v.startTime + v.duration}`,
            );

          // Check if video extends outside canvas
          if (v.x < 0 || v.y < 0 || v.x + v.width > dimensions.width || v.y + v.height > dimensions.height) {
            console.warn(`⚠️ Video ${i} extends outside canvas boundaries`);
          }

          // Check if this is a WebM video (likely has transparency)
          const isWebM = v.source_webm && v.source_webm.includes('webm');

          filters.push(`[${videoInputIndex}:v]trim=start=${v.inPoint}:duration=${v.duration},setpts=PTS-STARTPTS[v${i}_trim]`);

          // For WebM videos, preserve alpha channel during scaling
          if (isWebM) {
            showConsoleLogs && console.log(`🌟 Video ${i} is WebM - preserving alpha channel`);
            filters.push(`[v${i}_trim]scale=${Math.round(v.width)}:${Math.round(v.height)}:flags=bicubic[v${i}_scale]`);
          } else {
            filters.push(`[v${i}_trim]scale=${Math.round(v.width)}:${Math.round(v.height)}[v${i}_scale]`);
          }

          // For overlay, ensure alpha blending is enabled for WebM
          if (isWebM) {
            filters.push(
              `[${videoLayer}][v${i}_scale]overlay=${Math.round(v.x)}:${Math.round(v.y)}:enable='between(t,${v.startTime},${
                v.startTime + v.duration
              })':format=auto:eof_action=pass[v${i}_out]`,
            );
          } else {
            filters.push(
              `[${videoLayer}][v${i}_scale]overlay=${Math.round(v.x)}:${Math.round(v.y)}:enable='between(t,${v.startTime},${
                v.startTime + v.duration
              })'[v${i}_out]`,
            );
          }

          videoLayer = `v${i}_out`;
          videoInputIndex++;
        } else if (element.elementType === 'image') {
          const img = element;
          const i = element.originalIndex;

          showConsoleLogs &&
            console.log(
              `🖼️ Image ${i} (Layer ${img.layer || 0}) - Position: (${img.x}, ${img.y}), Size: ${img.width}x${img.height}, Time: ${img.startTime}-${img.startTime + img.duration}`,
            );

          // Check if image is larger than canvas or positioned outside
          if (img.width > dimensions.width || img.height > dimensions.height) {
            console.warn(`⚠️ Image ${i} (${img.width}x${img.height}) is larger than canvas (${dimensions.width}x${dimensions.height})`);
          }
          if (img.x < 0 || img.y < 0 || img.x + img.width > dimensions.width || img.y + img.height > dimensions.height) {
            console.warn(`⚠️ Image ${i} extends outside canvas boundaries`);
          }

          filters.push(`[${imageInputIndex}:v]scale=${Math.round(img.width)}:${Math.round(img.height)}[img${i}_scale]`);
          filters.push(
            `[${videoLayer}][img${i}_scale]overlay=${Math.round(img.x)}:${Math.round(img.y)}:enable='between(t,${img.startTime},${
              img.startTime + img.duration
            })'[img${i}_out]`,
          );
          videoLayer = `img${i}_out`;
          imageInputIndex++;
        } else if (element.elementType === 'text') {
          const t = element;
          const i = element.originalIndex;

          showConsoleLogs &&
            console.log(`📝 Text ${i} (Layer ${t.layer || 0}) - Text: "${t.text.substring(0, 30)}..." - Using Konva-rendered image`);

          // Use overlay filter for Konva-rendered text image
          filters.push(
            `[${videoLayer}][${textInputIndex}:v]overlay=0:0:enable='between(t,${t.startTime},${t.startTime + t.duration})'[t${i}_out]`,
          );

          videoLayer = `t${i}_out`;
          textInputIndex++;
        }
      });

      // Add watermark overlay if exists
      if (watermarkFileName) {
        filters.push(`[${videoLayer}][${textInputIndex}:v]overlay=0:0[watermark_out]`);
        videoLayer = 'watermark_out';
      }

      showConsoleLogs && console.log('🎵 PROCESSING AUDIO FOR', videos.length, 'VIDEOS');

      let audioOutputs = [];
      videos.forEach((v, i) => {
        const delay = Math.round(v.startTime * 1000);
        showConsoleLogs && console.log(`🎵 Audio ${i}: delay=${delay}ms, inPoint=${v.inPoint}, duration=${v.duration}`);
        filters.push(`[${i}:a]atrim=start=${v.inPoint}:duration=${v.duration},asetpts=PTS-STARTPTS,adelay=${delay}|${delay}[a${i}]`);
        audioOutputs.push(`[a${i}]`);
      });
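      // e.g. a video with startTime=1.5 yields adelay=1500|1500, delaying both stereo
      // channels by 1.5 s so its audio lines up with its overlay window.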

      let audioArgs = [];
      if (audioOutputs.length === 1) {
        filters.push(`[a0]apad=pad_dur=${totalDuration}[audio_final]`);
        audioArgs = ['-map', '[audio_final]']; // '-c:a aac' is appended once with the final args below
      } else if (audioOutputs.length > 1) {
        filters.push(`${audioOutputs.join('')}amix=inputs=${audioOutputs.length}:duration=longest[audio_final]`);
        audioArgs = ['-map', '[audio_final]'];
      }

      showConsoleLogs && console.log('🎵 Audio args:', audioArgs);

      const filterComplex = filters.join('; ');
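      // Illustrative shape only (the actual graph depends on the timeline); for one
      // full-canvas video plus one text overlay it looks roughly like:
      //   color=black:size=450x800:duration=5[base];
      //   [0:v]trim=start=0:duration=5,setpts=PTS-STARTPTS[v0_trim];
      //   [v0_trim]scale=450:800[v0_scale];
      //   [base][v0_scale]overlay=0:0:enable='between(t,0,5)'[v0_out];
      //   [v0_out][1:v]overlay=0:0:enable='between(t,0,5)'[t0_out];
      //   [0:a]atrim=start=0:duration=5,asetpts=PTS-STARTPTS,adelay=0|0[a0];
      //   [a0]apad=pad_dur=5[audio_final]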
      showConsoleLogs && console.log('🎵 Filter includes atrim:', filterComplex.includes('atrim'));
      showConsoleLogs && console.log('📝 Complete filter complex:', filterComplex);
      showConsoleLogs &&
        console.log(
          `🎭 Final layer order:`,
          allVisualElements.map((el) => `${el.elementType}${el.originalIndex}(L${el.layer || 0})`).join(' → '),
        );

      const finalArgs = [
        '-y',
        '-c:v',
        'libvpx-vp9',
        ...inputArgs,
        '-filter_complex',
        filterComplex,
        '-map',
        `[${videoLayer}]`,
        ...audioArgs,
        '-c:a',
        'aac',
        '-r',
        '30',
        '-t',
        totalDuration.toString(),
        '-vcodec',
        'libx264',
        'output.mp4',
      ];

      if (is_string) {
        let inputStrings = [];

        videos.forEach((v, i) => {
          inputStrings.push(`-i ${escapeShellArg(useLocalFiles ? `input_video_${i}.webm` : v.source_webm)}`);
        });

        images.forEach((img, i) => {
          inputStrings.push(`-loop 1 -t ${img.duration} -i ${escapeShellArg(useLocalFiles ? `input_image_${i}.jpg` : img.source)}`);
        });

        texts.forEach((text, i) => {
          // Keep this consistent with the args path: use the rendered text image file name
          inputStrings.push(
            `-loop 1 -t ${totalDuration} -i ${escapeShellArg(useLocalFiles ? `text_${i}.png` : textImages[text.id]?.fileName || `text_${i}.png`)}`,
          );
        });

        if (watermarkFileName) {
          inputStrings.push(`-loop 1 -t ${totalDuration} -i ${escapeShellArg(watermarkFileName)}`);
        }

        const inputs = inputStrings.join(' ');
        const audioMap = audioArgs.length > 0 ? ` ${audioArgs.map((arg) => escapeShellArg(arg)).join(' ')}` : '';
        const command = `ffmpeg -y -c:v libvpx-vp9 ${inputs} -filter_complex ${escapeShellArg(filterComplex)} -map ${escapeShellArg(`[${videoLayer}]`)}${audioMap} -c:a aac -r 30 -t ${totalDuration} -vcodec libx264 output.mp4`;

        showConsoleLogs && console.log('🎵 FINAL COMMAND HAS AUDIO:', command.includes('atrim') && command.includes('audio_final'));

        return command;
      } else {
        showConsoleLogs &&
          console.log(
            '🎵 FINAL ARGS HAVE AUDIO:',
            finalArgs.some((arg) => typeof arg === 'string' && arg.includes('atrim')) &&
              finalArgs.some((arg) => typeof arg === 'string' && arg.includes('audio_final')),
          );

        return finalArgs;
      }
    },
    [timelineElements, dimensions, totalDuration, showConsoleLogs],
  );

  const ffmpegCommand = useMemo(() => {
    return generateFFmpegCommand(true, false, {}, null);
  }, [generateFFmpegCommand]);

  const copyFFmpegCommand = useCallback(() => {
    showConsoleLogs && console.log('🎬 FFMPEG COMMAND GENERATED:');
    showConsoleLogs && console.log('Command:', ffmpegCommand);
    navigator.clipboard.writeText(ffmpegCommand);
  }, [ffmpegCommand, showConsoleLogs]);

  const exportVideo = async () => {
    setIsExporting(true);
    setExportProgress(0);
    setExportStatus('Starting export...');

    try {
      setExportStatus('Loading FFmpeg...');

      const ffmpeg = new FFmpeg();

      ffmpeg.on('progress', ({ progress }) => {
        setExportProgress(Math.round(progress * 100));
      });

      ffmpeg.on('log', ({ message }) => {
        showConsoleLogs && console.log('FFmpeg Log:', message);
      });

      const baseURL = window.location.origin + '/ffmpeg_packages/core/dist/esm';
      const coreURL = `${baseURL}/ffmpeg-core.js`;
      const wasmURL = `${baseURL}/ffmpeg-core.wasm`;

      showConsoleLogs && console.log('Converting JS coreURL...');
      const coreBlobURL = await toBlobURL(coreURL, 'text/javascript');
      showConsoleLogs && console.log('JS coreURL ready:', coreBlobURL);

      showConsoleLogs && console.log('Converting WASM URL...');
      const wasmBlobURL = await toBlobURL(wasmURL, 'application/wasm');
      showConsoleLogs && console.log('WASM URL ready:', wasmBlobURL);

      showConsoleLogs && console.log('Calling ffmpeg.load...');
      await ffmpeg.load({
        coreURL: coreBlobURL,
        wasmURL: wasmBlobURL,
      });
      showConsoleLogs && console.log('FFmpeg loaded!');
      setExportProgress(10);

      setExportStatus('Loading fonts...');
      await loadTimelineFonts(timelineElements);
      showConsoleLogs && console.log('✅ All fonts loaded and ready');
      setExportProgress(15);

      setExportStatus('Rendering text elements...');

      // Render text elements to images
      const texts = timelineElements.filter((el) => el.type === 'text');
      const textImages = {};

      for (let i = 0; i < texts.length; i++) {
        const textElement = texts[i];
        const dataURL = await renderTextElementToImage(textElement, dimensions);
        const imageData = await fetchFile(dataURL);

        const fileName = `text_${i}.png`;
        await ffmpeg.writeFile(fileName, imageData);
        textImages[textElement.id] = { fileName, index: i };

        // Count completed items so progress reaches 30 when the last text is done
        setExportProgress(15 + Math.round(((i + 1) / texts.length) * 15));
      }

      // Render watermark if needed (you'll need to pass watermarked prop)
      let watermarkFileName = null;
      // Uncomment if you have watermarked prop available:
      // if (watermarked) {
      //   const watermarkDataURL = await renderWatermarkToImage(dimensions);
      //   const watermarkImageData = await fetchFile(watermarkDataURL);
      //   watermarkFileName = 'watermark.png';
      //   await ffmpeg.writeFile(watermarkFileName, watermarkImageData);
      // }

      setExportProgress(30);
      showConsoleLogs && console.log('✅ All text elements rendered to images');

      setExportStatus('Downloading media...');
      const videos = timelineElements.filter((el) => el.type === 'video');
      const images = timelineElements.filter((el) => el.type === 'image');
      const totalMedia = videos.length + images.length;

      showConsoleLogs && console.log(`Total media to download: ${totalMedia} (${videos.length} videos, ${images.length} images)`);

      let mediaProgress = 0;

      // Download videos
      for (let i = 0; i < videos.length; i++) {
        try {
          showConsoleLogs && console.log(`Downloading video ${i}: ${videos[i].source_webm}`);
          await ffmpeg.writeFile(`input_video_${i}.webm`, await fetchFile(videos[i].source_webm));
          showConsoleLogs && console.log(`✓ Video ${i} downloaded`);
        } catch (error) {
          console.error(`❌ Failed to download video ${i}:`, error);
          throw new Error(`Failed to download video ${i}: ${error.message}`);
        }
        mediaProgress++;
        setExportProgress(30 + Math.round((mediaProgress / totalMedia) * 40));
      }

      // Download images
      for (let i = 0; i < images.length; i++) {
        try {
          showConsoleLogs && console.log(`Downloading image ${i}: ${images[i].source}`);
          await ffmpeg.writeFile(`input_image_${i}.jpg`, await fetchFile(images[i].source));
          showConsoleLogs && console.log(`✓ Image ${i} downloaded`);
        } catch (error) {
          console.error(`❌ Failed to download image ${i}:`, error);
          throw new Error(`Failed to download image ${i}: ${error.message}`);
        }
        mediaProgress++;
        setExportProgress(30 + Math.round((mediaProgress / totalMedia) * 40));
      }

      showConsoleLogs && console.log('All media downloaded successfully!');

      // List all files in FFmpeg filesystem for debugging
      try {
        const files = await ffmpeg.listDir('/');
        showConsoleLogs && console.log('Files in FFmpeg filesystem:', files);
      } catch (listError) {
        console.warn('Could not list FFmpeg filesystem:', listError);
      }

      setExportStatus('Processing video...');
      let args = generateFFmpegCommand(false, true, textImages, watermarkFileName);

      showConsoleLogs && console.log('Generated FFmpeg arguments:', args);
      showConsoleLogs && console.log(generateFFmpegCommand(true, true, textImages, watermarkFileName));

      setExportProgress(70);

      try {
        await ffmpeg.exec(args);
        showConsoleLogs && console.log('FFmpeg execution completed successfully!');
      } catch (execError) {
        console.error('FFmpeg execution failed:', execError);
        console.error('Failed arguments:', args);

        // Log the specific error details
        if (execError.message) {
          console.error('Error message:', execError.message);
        }

        throw new Error(`FFmpeg execution failed: ${execError.message || 'Unknown error'}`);
      }

      setExportStatus('Downloading...');
      setExportProgress(90);

      const fileData = await ffmpeg.readFile('output.mp4');
      const data = new Uint8Array(fileData);

      const blob = new Blob([data.buffer], { type: 'video/mp4' });
      const url = URL.createObjectURL(blob);

      const link = document.createElement('a');
      link.href = url;
      link.download = 'exported_video.mp4';
      link.click();
      URL.revokeObjectURL(url);

      setExportProgress(100);
      setExportStatus('Complete!');

      ffmpeg.terminate();
    } catch (error) {
      console.error('Full export error details:', {
        message: error.message,
        stack: error.stack,
        name: error.name,
        code: error.code,
        errno: error.errno,
        path: error.path,
        error: error,
      });
      setExportStatus(`Failed: ${error.message}`);
    } finally {
      setTimeout(() => {
        setIsExporting(false);
        setExportStatus('');
        setExportProgress(0);
      }, 3000);
    }
  };

  return {
    // State
    isExporting,
    exportProgress,
    exportStatus,
    ffmpegCommand,

    // Functions
    copyFFmpegCommand,
    exportVideo,
    generateFFmpegCommand,
  };
};

export default useVideoExport;
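
// Usage sketch (hypothetical component; prop names here are assumptions for illustration):
//
//   const ExportButton = ({ timelineElements, dimensions, totalDuration }) => {
//     const { isExporting, exportProgress, exportStatus, exportVideo } = useVideoExport({
//       timelineElements,
//       dimensions,
//       totalDuration,
//     });
//
//     return (
//       <button onClick={exportVideo} disabled={isExporting}>
//         {isExporting ? `${exportStatus} (${exportProgress}%)` : 'Export video'}
//       </button>
//     );
//   };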