diff --git a/resources/js/modules/editor/partials/canvas/video-editor.jsx b/resources/js/modules/editor/partials/canvas/video-editor.jsx
index c0811ec..abb40d2 100644
--- a/resources/js/modules/editor/partials/canvas/video-editor.jsx
+++ b/resources/js/modules/editor/partials/canvas/video-editor.jsx
@@ -25,8 +25,8 @@ const VideoEditor = ({ width, height }) => {
             duration: 5,
             x: 50,
             y: 50,
-            width: 300, // Will be updated when video loads
-            height: 200, // Will be updated when video loads
+            width: 300,
+            height: 200,
         },
         {
             id: '2',
@@ -41,8 +41,8 @@ const VideoEditor = ({ width, height }) => {
             duration: 4,
             x: 100,
             y: 100,
-            width: 250, // Will be updated when video loads
-            height: 150, // Will be updated when video loads
+            width: 250,
+            height: 150,
         },
         {
             id: '3',
@@ -57,8 +57,8 @@ const VideoEditor = ({ width, height }) => {
             duration: 6,
             x: 200,
             y: 200,
-            width: 280, // Will be updated when video loads
-            height: 180, // Will be updated when video loads
+            width: 280,
+            height: 180,
         },
         {
             id: '4',
@@ -91,10 +91,7 @@ const VideoEditor = ({ width, height }) => {
     ]);
 
     const lastUpdateRef = useRef(0);
-
-    // FFmpeg WASM states
     const ffmpegRef = useRef(new FFmpeg());
-
     const emitter = useMitt();
 
     const [isExporting, setIsExporting] = useState(false);
@@ -106,8 +103,6 @@ const VideoEditor = ({ width, height }) => {
     const [videoElements, setVideoElements] = useState({});
     const [loadedVideos, setLoadedVideos] = useState(new Set());
     const [status, setStatus] = useState('Loading videos...');
-
-    // Track which videos should be playing - this is the key optimization
     const [videoStates, setVideoStates] = useState({});
 
     const animationRef = useRef(null);
@@ -119,12 +114,10 @@ const VideoEditor = ({ width, height }) => {
     useEffect(() => {
         setVideoIsPlaying(isPlaying);
-    }, [isPlaying]);
+    }, [isPlaying, setVideoIsPlaying]);
 
-    // Calculate total timeline duration
     const totalDuration = Math.max(...timelineElements.map((el) => el.startTime + el.duration));
 
-    // Generate FFmpeg command - COMPLETE VERSION
     const generateFFmpegCommand = useCallback(
         (is_string = true, useLocalFiles = false) => {
             console.log('🎬 STARTING FFmpeg generation');
@@ -142,20 +135,15 @@ const VideoEditor = ({ width, height }) => {
                 }
             }
 
-            // Build inputs
             let inputArgs = [];
             videos.forEach((v, i) => {
                 inputArgs.push('-i');
                 inputArgs.push(useLocalFiles ? `input${i}.webm` : v.source);
             });
 
-            // Build filter parts array
             let filters = [];
-
-            // Base canvas
             filters.push(`color=black:size=${dimensions.width}x${dimensions.height}:duration=${totalDuration}[base]`);
 
-            // Process video streams
             let videoLayer = 'base';
             videos.forEach((v, i) => {
                 filters.push(`[${i}:v]trim=start=${v.inPoint}:duration=${v.duration},setpts=PTS-STARTPTS[v${i}_trim]`);
@@ -168,7 +156,6 @@ const VideoEditor = ({ width, height }) => {
                 videoLayer = `v${i}_out`;
             });
 
-            // AUDIO PROCESSING - EXPLICIT AND COMPLETE
             console.log('🎵 PROCESSING AUDIO FOR', videos.length, 'VIDEOS');
 
             let audioOutputs = [];
@@ -179,7 +166,6 @@ const VideoEditor = ({ width, height }) => {
                 audioOutputs.push(`[a${i}]`);
             });
 
-            // Audio mixing
             let audioArgs = [];
             if (audioOutputs.length === 1) {
                 filters.push(`[a0]apad=pad_dur=${totalDuration}[audio_final]`);
@@ -191,7 +177,6 @@ const VideoEditor = ({ width, height }) => {
 
             console.log('🎵 Audio args:', audioArgs);
 
-            // Add text overlays
             texts.forEach((t, i) => {
                 const escapedText = t.text.replace(/'/g, is_string ? "\\'" : "'").replace(/:/g, '\\:');
"\\'" : "'").replace(/:/g, '\\:'); @@ -204,11 +189,10 @@ const VideoEditor = ({ width, height }) => { ); videoLayer = `t${i}`; }); - // Join all filter parts + const filterComplex = filters.join('; '); console.log('🎵 Filter includes atrim:', filterComplex.includes('atrim')); - // Build final arguments const finalArgs = [ ...inputArgs, '-filter_complex', @@ -228,7 +212,6 @@ const VideoEditor = ({ width, height }) => { ]; if (is_string) { - // Build final command string const inputs = videos.map((v, i) => `-i "${useLocalFiles ? `input${i}.webm` : v.source}"`).join(' '); const audioMap = audioArgs.length > 0 ? ` ${audioArgs.join(' ')}` : ''; const command = `ffmpeg ${inputs} -filter_complex "${filterComplex}" -map "[${videoLayer}]"${audioMap} -c:v libx264 -pix_fmt yuv420p -r 30 -t ${totalDuration} output.mp4`; @@ -245,20 +228,16 @@ const VideoEditor = ({ width, height }) => { [timelineElements, dimensions, totalDuration], ); - // Memoize the FFmpeg command const ffmpegCommand = useMemo(() => { return generateFFmpegCommand(true, false); }, [generateFFmpegCommand]); - // Memoize the copy function const copyFFmpegCommand = useCallback(() => { console.log('🎬 FFMPEG COMMAND GENERATED:'); console.log('Command:', ffmpegCommand); navigator.clipboard.writeText(ffmpegCommand); }, [ffmpegCommand]); - // Create video elements - // Replace your existing useEffect with this: useEffect(() => { const videoEls = {}; const videoElementsData = timelineElements.filter((el) => el.type === 'video'); @@ -282,13 +261,11 @@ const VideoEditor = ({ width, height }) => { video.appendChild(sourceMov); video.appendChild(sourceWebM); - // Load poster separately const posterImg = new Image(); posterImg.crossOrigin = 'anonymous'; posterImg.src = element.poster; posterImg.onload = () => { - // Calculate scaling for poster const maxWidth = dimensions.width; const maxHeight = dimensions.height; const posterWidth = posterImg.naturalWidth; @@ -309,7 +286,6 @@ const VideoEditor = ({ width, height }) => { const centeredX = (maxWidth - scaledWidth) / 2; const centeredY = (maxHeight - scaledHeight) / 2; - // Update timeline element with poster setTimelineElements((prev) => prev.map((el) => { if (el.id === element.id && el.type === 'video') { @@ -319,8 +295,8 @@ const VideoEditor = ({ width, height }) => { y: centeredY, width: scaledWidth, height: scaledHeight, - posterImage: posterImg, // Store poster reference - isVideoPoster: true, // Flag to indicate poster is loaded + posterImage: posterImg, + isVideoPoster: true, }; } return el; @@ -335,14 +311,13 @@ const VideoEditor = ({ width, height }) => { }; video.addEventListener('loadedmetadata', () => { - // Video metadata loaded - store video reference setTimelineElements((prev) => prev.map((el) => { if (el.id === element.id && el.type === 'video') { return { ...el, - videoElement: video, // Store video reference - isVideoReady: true, // Flag to indicate video is ready + videoElement: video, + isVideoReady: true, }; } return el; @@ -369,9 +344,8 @@ const VideoEditor = ({ width, height }) => { video.load(); }); }; - }, [dimensions.width, dimensions.height]); // Add dimensions as dependency + }, [dimensions.width, dimensions.height]); - // Update status when videos load useEffect(() => { const videoCount = timelineElements.filter((el) => el.type === 'video').length; if (loadedVideos.size === videoCount && videoCount > 0) { @@ -383,12 +357,12 @@ const VideoEditor = ({ width, height }) => { } }, [loadedVideos, timelineElements]); + // FIXED: Removed currentTime dependency to prevent 
     const handlePause = useCallback(() => {
         if (isPlaying) {
             setIsPlaying(false);
             pausedTimeRef.current = currentTime;
 
-            // Pause and mute all videos when pausing timeline
             Object.values(videoElements).forEach((video) => {
                 if (!video.paused) {
                     video.pause();
@@ -396,14 +370,14 @@ const VideoEditor = ({ width, height }) => {
                 }
                 video.muted = true;
             });
-            // Reset video states tracking
             setVideoStates({});
 
             if (animationRef.current) {
                 animationRef.current.stop();
+                animationRef.current = null;
             }
         }
-    }, [isPlaying, currentTime, videoElements]);
+    }, [isPlaying, videoElements]);
 
     const exportVideo = async () => {
         setIsExporting(true);
@@ -443,13 +417,11 @@ const VideoEditor = ({ width, height }) => {
             console.log('FFmpeg loaded!');
             setExportProgress(20);
 
-            // Write arial.ttf font into FFmpeg FS (fetch from GitHub)
             setExportStatus('Loading font...');
             await ffmpeg.writeFile('arial.ttf', await fetchFile('https://raw.githubusercontent.com/ffmpegwasm/testdata/master/arial.ttf'));
             console.log('Font loaded!');
             setExportProgress(30);
 
-            // Download videos
             setExportStatus('Downloading videos...');
             const videos = timelineElements.filter((el) => el.type === 'video');
 
@@ -458,18 +430,12 @@ const VideoEditor = ({ width, height }) => {
                 setExportProgress(30 + Math.round(((i + 1) / videos.length) * 30));
             }
 
-            // Generate your FFmpeg command, but be sure to include fontfile=/arial.ttf in all drawtext filters
             setExportStatus('Processing video...');
             let args = generateFFmpegCommand(false, true);
 
-            // Example: if your command uses drawtext filters, add fontfile argument like:
-            // drawtext=fontfile=/arial.ttf:text='Your text':x=50:y=600:fontsize=24:fontcolor=white:borderw=1:bordercolor=black
-            // Make sure your generateFFmpegCommand function inserts this correctly.
-
            setExportProgress(70);
            await ffmpeg.exec(args);
 
-            // Download result
             setExportStatus('Downloading...');
             setExportProgress(90);
 
@@ -501,7 +467,6 @@ const VideoEditor = ({ width, height }) => {
         }
     };
 
-    // Get currently active elements based on timeline position
     const getActiveElements = useCallback(
         (time) => {
             return timelineElements.filter((element) => {
@@ -512,10 +477,8 @@ const VideoEditor = ({ width, height }) => {
         [timelineElements],
     );
 
-    // Calculate which videos should be playing based on current time
     const getDesiredVideoStates = useCallback(
         (time) => {
-            // Accept time as parameter
             const states = {};
             timelineElements.forEach((element) => {
                 if (element.type === 'video') {
@@ -525,10 +488,9 @@ const VideoEditor = ({ width, height }) => {
             });
             return states;
         },
-        [timelineElements], // Removed dependency on currentTime
+        [timelineElements],
     );
 
-    // Update video times based on timeline position - optimized to reduce seeking
     const updateVideoTimes = useCallback(
         (time) => {
             timelineElements.forEach((element) => {
@@ -540,7 +502,6 @@ const VideoEditor = ({ width, height }) => {
                     const relativeTime = time - element.startTime;
                     const videoTime = element.inPoint + relativeTime;
 
-                    // Only seek if time difference is significant
                     if (Math.abs(video.currentTime - videoTime) > 0.5) {
                         video.currentTime = videoTime;
                     }
@@ -551,13 +512,11 @@ const VideoEditor = ({ width, height }) => {
         [timelineElements, videoElements],
     );
 
-    // OPTIMIZED: Manage video play/pause states only when needed
     useEffect(() => {
         if (!isPlaying) return;
 
         const desiredStates = getDesiredVideoStates(currentTime);
 
-        // Smarter play/pause without excessive updates
         Object.entries(desiredStates).forEach(([videoId, shouldPlay]) => {
             const video = videoElements[videoId];
             const isCurrentlyPlaying = !video?.paused;
@@ -576,137 +535,140 @@ const VideoEditor = ({ width, height }) => {
         setVideoStates(desiredStates);
     }, [currentTime, isPlaying, videoElements, getDesiredVideoStates]);
 
-    const animate = useCallback(() => {
-        if (!animationRef.current || !isPlaying) return;
-
-        const now = Date.now() / 1000;
-        const newTime = pausedTimeRef.current + (now - startTimeRef.current);
-
-        if (newTime >= totalDuration) {
-            handlePause();
-            handleSeek(0); // ⬅️ Reset timeline
+    // FIXED: Properly stop animation when not playing
+    useEffect(() => {
+        if (!isPlaying) {
+            if (animationRef.current) {
+                animationRef.current.stop();
+                animationRef.current = null;
+            }
             return;
         }
 
-        if (newTime - lastUpdateRef.current >= 0.05) {
-            lastUpdateRef.current = newTime;
-            setCurrentTime(newTime);
-            updateVideoTimes(newTime);
+        let animationId;
+        let isRunning = true;
 
-            if (layerRef.current) {
-                layerRef.current.batchDraw();
+        const animateFrame = () => {
+            if (!isRunning) return;
+
+            const now = Date.now() / 1000;
+            const newTime = pausedTimeRef.current + (now - startTimeRef.current);
+
+            if (newTime >= totalDuration) {
+                handlePause();
+                handleSeek(0);
+                return;
             }
-        }
-    }, [isPlaying, totalDuration, updateVideoTimes, handlePause]);
 
-    // Start animation loop - using requestAnimationFrame for better performance
-    useEffect(() => {
-        if (isPlaying) {
-            let animationId;
+            if (newTime - lastUpdateRef.current >= 0.05) {
+                lastUpdateRef.current = newTime;
+                setCurrentTime(newTime);
+                updateVideoTimes(newTime);
 
-            const animateFrame = () => {
-                animate();
-                animationId = requestAnimationFrame(animateFrame);
-            };
-
-            animationId = requestAnimationFrame(animateFrame);
-            animationRef.current = { stop: () => cancelAnimationFrame(animationId) };
-
-            return () => {
-                if (animationRef.current) {
-                    animationRef.current.stop();
+                if (layerRef.current) {
+                    layerRef.current.batchDraw();
                 }
-            };
-        }
-    }, [isPlaying, animate]);
+            }
 
-    const handlePlay = () => {
+            if (isRunning) {
+                animationId = requestAnimationFrame(animateFrame);
+            }
+        };
+
+        startTimeRef.current = Date.now() / 1000;
+        animationId = requestAnimationFrame(animateFrame);
+
+        animationRef.current = {
+            stop: () => {
+                isRunning = false;
+                if (animationId) {
+                    cancelAnimationFrame(animationId);
+                }
+            },
+        };
+
+        return () => {
+            isRunning = false;
+            if (animationId) {
+                cancelAnimationFrame(animationId);
+            }
+        };
+    }, [isPlaying, totalDuration, handlePause, handleSeek, updateVideoTimes]);
+
+    // FIXED: Stabilized handlers
+    const handlePlay = useCallback(() => {
         if (!isPlaying) {
             setIsPlaying(true);
             startTimeRef.current = Date.now() / 1000;
-            lastUpdateRef.current = 0; // ✅ Reset debounce tracker
+            lastUpdateRef.current = 0;
             setStatus('');
         }
-    };
+    }, [isPlaying]);
 
-    const handleSeek = (time) => {
-        const clampedTime = Math.max(0, Math.min(time, totalDuration));
-        setCurrentTime(clampedTime);
-        pausedTimeRef.current = clampedTime;
-        updateVideoTimes(clampedTime);
+    const handleSeek = useCallback(
+        (time) => {
+            const clampedTime = Math.max(0, Math.min(time, totalDuration));
+            setCurrentTime(clampedTime);
+            pausedTimeRef.current = clampedTime;
+            updateVideoTimes(clampedTime);
 
-        // Reset video states when seeking to force re-evaluation
-        setVideoStates({});
+            setVideoStates({});
 
-        if (layerRef.current) {
-            layerRef.current.draw();
-        }
-    };
+            if (layerRef.current) {
+                layerRef.current.draw();
+            }
+        },
+        [totalDuration, updateVideoTimes],
    );
 
-    const handleReset = () => {
+    const handleReset = useCallback(() => {
         handlePause();
         handleSeek(0);
-        lastUpdateRef.current = 0; // ✅ Reset debounce tracker
+        lastUpdateRef.current = 0;
 
-        // Ensure all videos are muted
         Object.values(videoElements).forEach((video) => {
             video.muted = true;
         });
-    };
+    }, [handlePause, handleSeek, videoElements]);
 
     const activeElements = getActiveElements(currentTime);
 
+    // FIXED: Added missing dependencies to event listeners
     useEffect(() => {
-        emitter.on('video-play', () => {
-            console.log('video-play');
-            handlePlay();
-        });
-
-        emitter.on('video-pause', () => {
-            console.log('video-pause');
-            handlePause();
-        });
-
-        emitter.on('video-seek', (time) => {
-            handleSeek(time);
-        });
+        emitter.on('video-play', handlePlay);
+        emitter.on('video-reset', handleReset);
+        emitter.on('video-seek', handleSeek);
 
         return () => {
-            emitter.off('video-play');
-            emitter.off('video-pause');
-            emitter.off('video-seek');
+            emitter.off('video-play', handlePlay);
+            emitter.off('video-reset', handleReset);
+            emitter.off('video-seek', handleSeek);
         };
-    }, [emitter]);
+    }, [emitter, handlePlay, handleReset, handleSeek]);
 
     return (