This commit is contained in:
ct
2025-06-16 15:00:15 +08:00
parent d924f25fc7
commit d1fdeb6409
4 changed files with 160 additions and 12 deletions

View File

@@ -75,6 +75,20 @@ const sampleTimelineElements = [
     stroke: 'red',
     strokeWidth: 2,
   },
+  {
+    id: '6',
+    type: 'image',
+    source: 'https://cdn.memeaigen.com/g1/webp/este-cat-dance.webp',
+    name: 'Este cat dance',
+    startTime: 0,
+    layer: 5,
+    inPoint: 1,
+    duration: 6,
+    x: 200,
+    y: 200,
+    width: 280,
+    height: 180,
+  },
 ];
 
 export default sampleTimelineElements;

View File

@@ -72,8 +72,87 @@ const VideoEditor = ({ width, height }) => {
       showConsoleLogs && console.log('Loaded sample timeline');
       setupVideos();
+      setupImages(); // Add image setup
     });
   };
 
+  // ✅ NEW: Setup function for image elements
+  const setupImages = () => {
+    showConsoleLogs && console.log('setupImages');
+    const elements = timelineElementsRef.current;
+
+    if (elements.length === 0) {
+      console.log('No timeline elements to setup images for');
+      return;
+    }
+
+    const imageElementsData = elements.filter((el) => el.type === 'image');
+    console.log('Found', imageElementsData.length, 'image elements');
+
+    imageElementsData.forEach((element) => {
+      console.log('Creating image element for:', element.id);
+
+      const img = new Image();
+      img.crossOrigin = 'anonymous';
+      img.src = element.source;
+
+      img.onload = () => {
+        console.log('Image loaded for:', element.id);
+        const maxWidth = dimensions.width;
+        const maxHeight = dimensions.height;
+        const imgWidth = img.naturalWidth;
+        const imgHeight = img.naturalHeight;
+
+        let scaledWidth = imgWidth;
+        let scaledHeight = imgHeight;
+
+        // Scale down if image is larger than canvas
+        if (imgWidth > maxWidth || imgHeight > maxHeight) {
+          const scaleX = maxWidth / imgWidth;
+          const scaleY = maxHeight / imgHeight;
+          const scale = Math.min(scaleX, scaleY);
+          scaledWidth = imgWidth * scale;
+          scaledHeight = imgHeight * scale;
+        }
+
+        // Use provided position or center the image
+        const centeredX = element.x || (maxWidth - scaledWidth) / 2;
+        const centeredY = element.y || (maxHeight - scaledHeight) / 2;
+
+        setTimelineElements((prev) =>
+          prev.map((el) => {
+            if (el.id === element.id && el.type === 'image') {
+              return {
+                ...el,
+                x: centeredX,
+                y: centeredY,
+                width: element.width || scaledWidth,
+                height: element.height || scaledHeight,
+                imageElement: img,
+                isImageReady: true,
+              };
+            }
+            return el;
+          }),
+        );
+
+        setLoadedVideos((prev) => {
+          const newSet = new Set(prev);
+          newSet.add(element.id);
+          console.log('Image loaded:', element.id, 'Total loaded:', newSet.size);
+          return newSet;
+        });
+      };
+
+      img.onerror = (e) => {
+        console.error(`Error loading image ${element.id}:`, e);
+      };
+    });
+  };
+
   // ✅ FIX 3: Auto-update status when videos load
   useEffect(() => {
     setupVideoStatus();
@@ -232,11 +311,12 @@ const VideoEditor = ({ width, height }) => {
   };
 
   const setupVideoStatus = () => {
-    const videoCount = timelineElements.filter((el) => el.type === 'video').length;
-    if (loadedVideos.size === videoCount && videoCount > 0) {
+    // Update to count both videos and images
+    const mediaCount = timelineElements.filter((el) => el.type === 'video' || el.type === 'image').length;
+    if (loadedVideos.size === mediaCount && mediaCount > 0) {
       setStatus('Ready to play');
-    } else if (videoCount > 0) {
-      setStatus(`Loading videos... (${loadedVideos.size}/${videoCount})`);
+    } else if (mediaCount > 0) {
+      setStatus(`Loading media... (${loadedVideos.size}/${mediaCount})`);
     } else {
       setStatus('Ready to play');
     }
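
Note: the scale-down branch in setupImages is a contain-style fit that preserves aspect ratio, and element.x / element.y fall back to centering via || (so a coordinate of 0 is treated as "not provided"). A minimal standalone sketch of the same math, using a hypothetical fitInside helper name that is not part of this commit:

// Sketch only: mirrors the scaling logic inside img.onload above.
const fitInside = (imgWidth, imgHeight, maxWidth, maxHeight) => {
  // Keep the natural size when the image already fits the canvas.
  if (imgWidth <= maxWidth && imgHeight <= maxHeight) {
    return { width: imgWidth, height: imgHeight };
  }
  // Otherwise scale by the tighter axis so the whole image stays visible.
  const scale = Math.min(maxWidth / imgWidth, maxHeight / imgHeight);
  return { width: imgWidth * scale, height: imgHeight * scale };
};

// Example: a 1920x1080 image on a 450x800 canvas becomes 450x253.125.
console.log(fitInside(1920, 1080, 450, 800));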

View File

@@ -14,11 +14,13 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
     showConsoleLogs && console.log('🎬 STARTING FFmpeg generation');
 
     const videos = timelineElements.filter((el) => el.type === 'video');
+    const images = timelineElements.filter((el) => el.type === 'image');
     const texts = timelineElements.filter((el) => el.type === 'text');
 
     showConsoleLogs && console.log('Videos found:', videos.length);
+    showConsoleLogs && console.log('Images found:', images.length);
 
-    if (videos.length === 0) {
+    if (videos.length === 0 && images.length === 0) {
       if (is_string) {
         return 'ffmpeg -f lavfi -i color=black:size=450x800:duration=1 -c:v libx264 -t 1 output.mp4';
       } else {
@@ -27,17 +29,31 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
     }
 
     let inputArgs = [];
     let inputIndex = 0;
 
+    // Add video inputs
     videos.forEach((v, i) => {
       inputArgs.push('-i');
-      inputArgs.push(useLocalFiles ? `input${i}.webm` : v.source);
+      inputArgs.push(useLocalFiles ? `input_video_${i}.webm` : v.source_webm);
       inputIndex++;
     });
 
+    // Add image inputs with loop and duration
+    images.forEach((img, i) => {
+      inputArgs.push('-loop', '1', '-t', img.duration.toString(), '-i');
+      inputArgs.push(useLocalFiles ? `input_image_${i}.jpg` : img.source);
+      inputIndex++;
+    });
+
     let filters = [];
     filters.push(`color=black:size=${dimensions.width}x${dimensions.height}:duration=${totalDuration}[base]`);
 
     let videoLayer = 'base';
+    let currentInputIndex = 0;
 
+    // Process video elements
     videos.forEach((v, i) => {
-      filters.push(`[${i}:v]trim=start=${v.inPoint}:duration=${v.duration},setpts=PTS-STARTPTS[v${i}_trim]`);
+      filters.push(`[${currentInputIndex}:v]trim=start=${v.inPoint}:duration=${v.duration},setpts=PTS-STARTPTS[v${i}_trim]`);
       filters.push(`[v${i}_trim]scale=${Math.round(v.width)}:${Math.round(v.height)}[v${i}_scale]`);
       filters.push(
         `[${videoLayer}][v${i}_scale]overlay=${Math.round(v.x)}:${Math.round(v.y)}:enable='between(t,${v.startTime},${
@@ -45,6 +61,20 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
         })'[v${i}_out]`,
       );
       videoLayer = `v${i}_out`;
+      currentInputIndex++;
     });
 
+    // Process image elements
+    images.forEach((img, i) => {
+      const imgInputIndex = currentInputIndex;
+      filters.push(`[${imgInputIndex}:v]scale=${Math.round(img.width)}:${Math.round(img.height)}[img${i}_scale]`);
+      filters.push(
+        `[${videoLayer}][img${i}_scale]overlay=${Math.round(img.x)}:${Math.round(img.y)}:enable='between(t,${img.startTime},${
+          img.startTime + img.duration
+        })'[img${i}_out]`,
+      );
+      videoLayer = `img${i}_out`;
+      currentInputIndex++;
+    });
+
     showConsoleLogs && console.log('🎵 PROCESSING AUDIO FOR', videos.length, 'VIDEOS');
@@ -103,7 +133,18 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
     ];
 
     if (is_string) {
-      const inputs = videos.map((v, i) => `-i "${useLocalFiles ? `input${i}.webm` : v.source}"`).join(' ');
+      let inputStrings = [];
+      let inputIdx = 0;
+
+      videos.forEach((v, i) => {
+        inputStrings.push(`-i "${useLocalFiles ? `input_video_${i}.webm` : v.source_webm}"`);
+      });
+
+      images.forEach((img, i) => {
+        inputStrings.push(`-loop 1 -t ${img.duration} -i "${useLocalFiles ? `input_image_${i}.jpg` : img.source}"`);
+      });
+
+      const inputs = inputStrings.join(' ');
       const audioMap = audioArgs.length > 0 ? ` ${audioArgs.join(' ')}` : '';
       const command = `ffmpeg ${inputs} -filter_complex "${filterComplex}" -map "[${videoLayer}]"${audioMap} -c:v libx264 -pix_fmt yuv420p -r 30 -t ${totalDuration} output.mp4`;
@@ -172,12 +213,25 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
     showConsoleLogs && console.log('Font loaded!');
     setExportProgress(30);
-    setExportStatus('Downloading videos...');
+    setExportStatus('Downloading media...');
 
     const videos = timelineElements.filter((el) => el.type === 'video');
+    const images = timelineElements.filter((el) => el.type === 'image');
+    const totalMedia = videos.length + images.length;
+    let mediaProgress = 0;
+
+    // Download videos
     for (let i = 0; i < videos.length; i++) {
-      await ffmpeg.writeFile(`input${i}.webm`, await fetchFile(videos[i].source));
-      setExportProgress(30 + Math.round(((i + 1) / videos.length) * 30));
+      await ffmpeg.writeFile(`input_video_${i}.webm`, await fetchFile(videos[i].source_webm));
+      mediaProgress++;
+      setExportProgress(30 + Math.round((mediaProgress / totalMedia) * 30));
     }
 
+    // Download images
+    for (let i = 0; i < images.length; i++) {
+      await ffmpeg.writeFile(`input_image_${i}.jpg`, await fetchFile(images[i].source));
+      mediaProgress++;
+      setExportProgress(30 + Math.round((mediaProgress / totalMedia) * 30));
+    }
+
     setExportStatus('Processing video...');
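
Assuming the filters array is joined with ';' into filterComplex (its assembly is not shown in these hunks), the string branch should produce a command along these lines for one video plus one image. The timings, coordinates and video scale below are made-up example values, useLocalFiles is assumed true, and the audio mapping is omitted:

ffmpeg -i "input_video_0.webm" -loop 1 -t 6 -i "input_image_0.jpg" \
  -filter_complex "color=black:size=450x800:duration=8[base];[0:v]trim=start=1:duration=4,setpts=PTS-STARTPTS[v0_trim];[v0_trim]scale=320:240[v0_scale];[base][v0_scale]overlay=40:60:enable='between(t,0,4)'[v0_out];[1:v]scale=280:180[img0_scale];[v0_out][img0_scale]overlay=200:200:enable='between(t,0,6)'[img0_out]" \
  -map "[img0_out]" -c:v libx264 -pix_fmt yuv420p -r 30 -t 8 output.mp4

The image input is looped for its timeline duration (-loop 1 -t 6), scaled, and overlaid on top of whatever the last video overlay produced, so images always stack above the video layers in export order.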

View File

@@ -91,7 +91,7 @@ const VideoPreview = ({
         draggable
       />
     );
-  } else if (element.type === 'image' && element.imageElement) {
+  } else if (element.type === 'image' && element.imageElement && element.isImageReady) {
     return (
       <Image
         key={element.id}
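
A minimal sketch, assuming react-konva's Image node, of how a ready image element would typically be drawn; apart from key={element.id}, the props below are assumptions for illustration rather than code from this commit:

// Sketch only: the image prop takes the HTMLImageElement created in setupImages.
<Image
  key={element.id}
  image={element.imageElement}
  x={element.x}
  y={element.y}
  width={element.width}
  height={element.height}
  draggable
/>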