Update
@@ -8,6 +8,8 @@ import useVideoExport from './video-export';
import VideoPreview from './video-preview';

const VideoEditor = ({ width, height }) => {
const [showConsoleLogs] = useState(true);

const [dimensions] = useState({
width: width,
height: height,
@@ -15,6 +17,9 @@ const VideoEditor = ({ width, height }) => {

const [timelineElements, setTimelineElements] = useState([]);

// 🔧 FIX: Add ref to solve closure issue
const timelineElementsRef = useRef([]);

const lastUpdateRef = useRef(0);
const emitter = useMitt();

@@ -32,10 +37,28 @@ const VideoEditor = ({ width, height }) => {

const { setVideoIsPlaying } = useVideoEditorStore();

// 🔧 FIX: Keep ref synced with state
useEffect(() => {
-setTimeout(() => setTimelineElements(sampleTimelineElements), 1000);
+timelineElementsRef.current = timelineElements;
}, [timelineElements]);
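
// Note (not part of this commit): why the ref is needed. State captured in a
// closure goes stale, so a callback scheduled before the timeline loads keeps
// seeing the initial empty array; reading through the ref that the effect above
// keeps in sync always yields the latest value. A minimal sketch (setTimeout
// stands in for any late-running callback):
setTimeout(() => {
console.log(timelineElements.length);            // stale: whatever was captured when this was scheduled
console.log(timelineElementsRef.current.length); // fresh: the ref is rewritten on every state change
}, 2000);
// End of note.
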
// ✅ FIX 1: Use useEffect to automatically setup videos when timeline loads
useEffect(() => {
setTimeout(() => {
setTimelineElements(sampleTimelineElements);
showConsoleLogs && console.log('Loaded sample timeline');

setTimeout(() => {
setupVideos();
}, 1000);
}, 1000);
}, []);
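
// Note (not part of this commit): the nested timers above are never cancelled,
// so an unmount within the first ~2 s would still fire them. A hypothetical
// sketch of the same effect with cleanup (`outer`/`inner` are made-up names):
useEffect(() => {
let inner; // nested timer id, tracked so it can be cleared too
const outer = setTimeout(() => {
setTimelineElements(sampleTimelineElements);
showConsoleLogs && console.log('Loaded sample timeline');
inner = setTimeout(setupVideos, 1000);
}, 1000);
return () => {
clearTimeout(outer);
clearTimeout(inner);
};
}, []);
// End of note.
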
// ✅ FIX 3: Auto-update status when videos load
useEffect(() => {
setupVideoStatus();
}, [timelineElements, loadedVideos]);

useEffect(() => {
setVideoIsPlaying(isPlaying);
}, [isPlaying, setVideoIsPlaying]);
@@ -49,11 +72,27 @@ const VideoEditor = ({ width, height }) => {
totalDuration,
});

useEffect(() => {
const setupVideos = () => {
showConsoleLogs && console.log('setupVideos');

// 🔧 FIX: Read from ref instead of state to get latest data
const elements = timelineElementsRef.current;

if (elements.length === 0) {
console.log('No timeline elements to setup videos for');
return;
}

console.log('Setting up videos for', elements.length, 'timeline elements');

const videoEls = {};
-const videoElementsData = timelineElements.filter((el) => el.type === 'video');
+const videoElementsData = elements.filter((el) => el.type === 'video');

console.log('Found', videoElementsData.length, 'video elements');

videoElementsData.forEach((element) => {
console.log('Creating video element for:', element.id);

const video = document.createElement('video');
video.crossOrigin = 'anonymous';
video.muted = true;
@@ -77,6 +116,8 @@ const VideoEditor = ({ width, height }) => {
posterImg.src = element.poster;

posterImg.onload = () => {
console.log('Poster loaded for:', element.id);

const maxWidth = dimensions.width;
const maxHeight = dimensions.height;
const posterWidth = posterImg.naturalWidth;
@@ -117,11 +158,14 @@ const VideoEditor = ({ width, height }) => {
setLoadedVideos((prev) => {
const newSet = new Set(prev);
newSet.add(element.id);
console.log('Video loaded:', element.id, 'Total loaded:', newSet.size);
return newSet;
});
};

video.addEventListener('loadedmetadata', () => {
console.log('Video metadata loaded for:', element.id);

setTimelineElements((prev) =>
prev.map((el) => {
if (el.id === element.id && el.type === 'video') {
@@ -147,17 +191,27 @@ const VideoEditor = ({ width, height }) => {
videoEls[element.id] = video;
});

console.log('Setting video elements:', Object.keys(videoEls));
setVideoElements(videoEls);
};

return () => {
Object.values(videoEls).forEach((video) => {
const cleanupVideos = (videosToCleanup) => {
if (!videosToCleanup) return;

const videoArray = Array.isArray(videosToCleanup) ? videosToCleanup : Object.values(videosToCleanup);

videoArray.forEach((video) => {
if (video && video.src) {
if (!video.paused) video.pause();
video.src = '';
video.load();
});
};
}, []);
video.removeEventListener('loadedmetadata', video._metadataHandler);
video.removeEventListener('error', video._errorHandler);
}
});
};
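
// Note (not part of this commit): a hypothetical sketch of how the new
// cleanupVideos helper could be wired into an unmount cleanup. It assumes
// videoElements is the map populated by setupVideos above.
useEffect(() => {
// Release every <video> created by setupVideos when the editor unmounts.
// (An empty dependency array captures the first videoElements value; mirroring
// it in a ref, as done for timelineElements, would avoid that.)
return () => cleanupVideos(videoElements);
}, []);
// End of note.
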
useEffect(() => {
const setupVideoStatus = () => {
const videoCount = timelineElements.filter((el) => el.type === 'video').length;
if (loadedVideos.size === videoCount && videoCount > 0) {
setStatus('Ready to play');
@@ -166,7 +220,7 @@ const VideoEditor = ({ width, height }) => {
} else {
setStatus('Ready to play');
}
}, [loadedVideos, timelineElements]);
};

// FIXED: Removed currentTime dependency to prevent excessive recreation
const handlePause = useCallback(() => {
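
// Note (not part of this commit): the hunk is cut off here. The comment above
// means currentTime is no longer listed as a dependency, so the callback keeps
// a stable identity across playback ticks. A rough, hypothetical shape (the
// real body is not shown in this diff):
const handlePauseSketch = useCallback(() => {
Object.values(videoElements).forEach((video) => video.pause());
setVideoIsPlaying(false);
// currentTime, if needed, would be read from a ref rather than closed over.
}, [videoElements, setVideoIsPlaying]);
// End of note.
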
@@ -3,6 +3,8 @@ import { fetchFile, toBlobURL } from '@ffmpeg/util';
import { useCallback, useMemo, useRef, useState } from 'react';

const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
const [showConsoleLogs] = useState(false);

const ffmpegRef = useRef(new FFmpeg());

const [isExporting, setIsExporting] = useState(false);
@@ -11,12 +13,12 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {

const generateFFmpegCommand = useCallback(
(is_string = true, useLocalFiles = false) => {
-console.log('🎬 STARTING FFmpeg generation');
+showConsoleLogs && console.log('🎬 STARTING FFmpeg generation');

const videos = timelineElements.filter((el) => el.type === 'video');
const texts = timelineElements.filter((el) => el.type === 'text');

-console.log('Videos found:', videos.length);
+showConsoleLogs && console.log('Videos found:', videos.length);

if (videos.length === 0) {
if (is_string) {
@@ -47,12 +49,12 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
videoLayer = `v${i}_out`;
});

-console.log('🎵 PROCESSING AUDIO FOR', videos.length, 'VIDEOS');
+showConsoleLogs && console.log('🎵 PROCESSING AUDIO FOR', videos.length, 'VIDEOS');

let audioOutputs = [];
videos.forEach((v, i) => {
const delay = Math.round(v.startTime * 1000);
-console.log(`🎵 Audio ${i}: delay=${delay}ms, inPoint=${v.inPoint}, duration=${v.duration}`);
+showConsoleLogs && console.log(`🎵 Audio ${i}: delay=${delay}ms, inPoint=${v.inPoint}, duration=${v.duration}`);
filters.push(`[${i}:a]atrim=start=${v.inPoint}:duration=${v.duration},asetpts=PTS-STARTPTS,adelay=${delay}|${delay}[a${i}]`);
audioOutputs.push(`[a${i}]`);
});
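
// Note (not part of this commit): a worked example of what the loop above
// pushes. For a hypothetical clip at index 0 with startTime = 2 (so
// delay = 2000 ms), inPoint = 1 and duration = 5, the filter string is:
//
//   [0:a]atrim=start=1:duration=5,asetpts=PTS-STARTPTS,adelay=2000|2000[a0]
//
// i.e. keep 5 s of audio starting 1 s into the source, reset its timestamps,
// then delay it by 2 s so it lines up with the clip's start on the timeline.
// End of note.
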
@@ -66,7 +68,7 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
audioArgs = ['-map', '[audio_final]', '-c:a', 'aac'];
}

-console.log('🎵 Audio args:', audioArgs);
+showConsoleLogs && console.log('🎵 Audio args:', audioArgs);

texts.forEach((t, i) => {
const escapedText = t.text.replace(/'/g, is_string ? "\\'" : "'").replace(/:/g, '\\:');
@@ -82,7 +84,7 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
});

const filterComplex = filters.join('; ');
-console.log('🎵 Filter includes atrim:', filterComplex.includes('atrim'));
+showConsoleLogs && console.log('🎵 Filter includes atrim:', filterComplex.includes('atrim'));

const finalArgs = [
...inputArgs,
@@ -107,11 +109,11 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
const audioMap = audioArgs.length > 0 ? ` ${audioArgs.join(' ')}` : '';
const command = `ffmpeg ${inputs} -filter_complex "${filterComplex}" -map "[${videoLayer}]"${audioMap} -c:v libx264 -pix_fmt yuv420p -r 30 -t ${totalDuration} output.mp4`;

-console.log('🎵 FINAL COMMAND HAS AUDIO:', command.includes('atrim') && command.includes('audio_final'));
+showConsoleLogs && console.log('🎵 FINAL COMMAND HAS AUDIO:', command.includes('atrim') && command.includes('audio_final'));

return command;
} else {
-console.log('🎵 FINAL ARGS HAVE AUDIO:', finalArgs.includes('atrim') && finalArgs.includes('audio_final'));
+showConsoleLogs && console.log('🎵 FINAL ARGS HAVE AUDIO:', finalArgs.includes('atrim') && finalArgs.includes('audio_final'));

return finalArgs;
}
@@ -124,8 +126,8 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
}, [generateFFmpegCommand]);

const copyFFmpegCommand = useCallback(() => {
-console.log('🎬 FFMPEG COMMAND GENERATED:');
-console.log('Command:', ffmpegCommand);
+showConsoleLogs && console.log('🎬 FFMPEG COMMAND GENERATED:');
+showConsoleLogs && console.log('Command:', ffmpegCommand);
navigator.clipboard.writeText(ffmpegCommand);
}, [ffmpegCommand]);
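
// Note (not part of this commit): every call site now repeats the
// `showConsoleLogs &&` guard. A tiny local helper would keep the same behaviour
// with less noise; a sketch (the `log` name is hypothetical):
const log = (...args) => showConsoleLogs && console.log(...args);
log('🎬 FFMPEG COMMAND GENERATED:');
log('Command:', ffmpegCommand);
// End of note.
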
@@ -144,32 +146,32 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
});

ffmpeg.on('log', ({ message }) => {
-console.log(message);
+showConsoleLogs && console.log(message);
});

const baseURL = 'https://unpkg.com/@ffmpeg/core@0.12.6/dist/esm';
const coreURL = `${baseURL}/ffmpeg-core.js`;
const wasmURL = `${baseURL}/ffmpeg-core.wasm`;

-console.log('Converting JS coreURL...');
+showConsoleLogs && console.log('Converting JS coreURL...');
const coreBlobURL = await toBlobURL(coreURL, 'text/javascript');
-console.log('JS coreURL ready:', coreBlobURL);
+showConsoleLogs && console.log('JS coreURL ready:', coreBlobURL);

-console.log('Converting WASM URL...');
+showConsoleLogs && console.log('Converting WASM URL...');
const wasmBlobURL = await toBlobURL(wasmURL, 'application/wasm');
-console.log('WASM URL ready:', wasmBlobURL);
+showConsoleLogs && console.log('WASM URL ready:', wasmBlobURL);

-console.log('Calling ffmpeg.load...');
+showConsoleLogs && console.log('Calling ffmpeg.load...');
await ffmpeg.load({
coreURL: coreBlobURL,
wasmURL: wasmBlobURL,
});
-console.log('FFmpeg loaded!');
+showConsoleLogs && console.log('FFmpeg loaded!');
setExportProgress(20);

setExportStatus('Loading font...');
await ffmpeg.writeFile('arial.ttf', await fetchFile('https://raw.githubusercontent.com/ffmpegwasm/testdata/master/arial.ttf'));
-console.log('Font loaded!');
+showConsoleLogs && console.log('Font loaded!');
setExportProgress(30);

setExportStatus('Downloading videos...');