memefast/resources/js/modules/editor/partials/canvas/video-editor.jsx

// Updated video-editor.jsx to use selected meme and background
import { useMitt } from '@/plugins/MittContext';
import useMediaStore from '@/stores/MediaStore'; // Provides the currently selected meme and background
import useVideoEditorStore from '@/stores/VideoEditorStore';
import { useCallback, useEffect, useRef, useState } from 'react';
import useVideoExport from './video-export';
import VideoPreview from './video-preview';
const VideoEditor = ({ width, height }) => {
const [showConsoleLogs] = useState(true);
const [dimensions] = useState({
width: width,
height: height,
});
const [timelineElements, setTimelineElements] = useState([]);
// 🔧 FIX: Add ref to solve closure issue
const timelineElementsRef = useRef([]);
const lastUpdateRef = useRef(0);
const emitter = useMitt();
// Add MediaStore to get selected items
const { selectedMeme, selectedBackground } = useMediaStore();
const [currentTime, setCurrentTime] = useState(0);
const [isPlaying, setIsPlaying] = useState(false);
const [videoElements, setVideoElements] = useState({});
const [loadedVideos, setLoadedVideos] = useState(new Set());
const [status, setStatus] = useState('Loading videos...');
const [videoStates, setVideoStates] = useState({});
const animationRef = useRef(null);
const layerRef = useRef(null);
const startTimeRef = useRef(0);
const pausedTimeRef = useRef(0);
const { setVideoIsPlaying } = useVideoEditorStore();
const FPS_INTERVAL = 1000 / 30; // 30 FPS
// 🔧 FIX: Keep ref synced with state
useEffect(() => {
timelineElementsRef.current = timelineElements;
}, [timelineElements]);
// ✅ NEW: Create timeline from selected items
const createTimelineFromSelections = useCallback(() => {
const elements = [];
// If we have a selected meme, use its duration, otherwise default to 5 seconds
const memeDuration = selectedMeme?.duration ? parseFloat(selectedMeme.duration) : 5;
// Add background video if selected (base layer)
if (selectedBackground) {
elements.push({
id: `bg_${selectedBackground.ids}`,
type: 'video',
source_webm: selectedBackground.media_url, // Assuming it's webm
source_mov: selectedBackground.media_url, // You might need different URLs
poster: selectedBackground.media_url,
name: 'Background',
startTime: 0,
layer: 0,
inPoint: 0,
duration: memeDuration, // Match meme duration
x: 0,
y: 0,
width: dimensions.width,
height: dimensions.height,
});
}
// Add meme video if selected (overlay layer)
if (selectedMeme) {
elements.push({
id: `meme_${selectedMeme.ids}`,
type: 'video',
source_webm: selectedMeme.webm_url,
source_mov: selectedMeme.mov_url,
poster: selectedMeme.webp_url,
name: selectedMeme.name,
startTime: 0,
layer: 1,
inPoint: 0,
duration: memeDuration,
x: 50, // Offset slightly for overlay effect
y: 50,
width: dimensions.width - 100,
height: dimensions.height - 100,
});
}
showConsoleLogs && console.log('Created timeline from selections:', elements);
return elements;
}, [selectedMeme, selectedBackground, dimensions]);
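// Promise-based setState: setTimelineElementsAsync resolves once React commits the new elements, so setupVideos can read them from the ref afterwards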
const timelineUpdateResolverRef = useRef(null);
const setTimelineElementsAsync = useCallback((newElements) => {
return new Promise((resolve) => {
timelineUpdateResolverRef.current = resolve;
setTimelineElements(newElements);
});
}, []);
// Add this useEffect to resolve the promise when timeline updates
useEffect(() => {
if (timelineUpdateResolverRef.current) {
// Resolve even when the new timeline is empty, so callers are never left hanging
timelineUpdateResolverRef.current();
timelineUpdateResolverRef.current = null;
}
}, [timelineElements]);
// ✅ UPDATED: Use selections instead of sample data
const initTimeline = useCallback(() => {
cleanupVideos(videoElements);
const newElements = createTimelineFromSelections();
setTimelineElementsAsync(newElements).then(() => {
showConsoleLogs && console.log('Loaded timeline from selections');
setupVideos();
});
}, [createTimelineFromSelections, videoElements]);
// ✅ NEW: Watch for changes in selected items and update timeline
useEffect(() => {
initTimeline();
}, [selectedMeme, selectedBackground]); // Re-run only when selections change; initTimeline is intentionally not a dependency so video-element updates don't retrigger it
// ✅ FIX 3: Auto-update status when videos load
useEffect(() => {
setupVideoStatus();
}, [timelineElements, loadedVideos]);
useEffect(() => {
setVideoIsPlaying(isPlaying);
}, [isPlaying, setVideoIsPlaying]);
// ✅ UPDATED: Calculate duration from actual timeline
const totalDuration = timelineElements.length > 0 ? Math.max(...timelineElements.map((el) => el.startTime + el.duration)) : 5; // Default fallback
// Use the FFmpeg hook
const { isExporting, exportProgress, exportStatus, ffmpegCommand, copyFFmpegCommand, exportVideo } = useVideoExport({
timelineElements,
dimensions,
totalDuration,
});
const setupVideos = () => {
showConsoleLogs && console.log('setupVideos');
// 🔧 FIX: Read from ref instead of state to get latest data
const elements = timelineElementsRef.current;
if (elements.length === 0) {
console.log('No timeline elements to setup videos for');
return;
}
console.log('Setting up videos for', elements.length, 'timeline elements');
const videoEls = {};
const videoElementsData = elements.filter((el) => el.type === 'video');
console.log('Found', videoElementsData.length, 'video elements');
videoElementsData.forEach((element) => {
console.log('Creating video element for:', element.id);
const video = document.createElement('video');
video.crossOrigin = 'anonymous';
video.muted = true;
video.preload = 'metadata';
video.playsInline = true;
video.controls = false;
const sourceWebM = document.createElement('source');
sourceWebM.src = element.source_webm;
sourceWebM.type = 'video/webm; codecs=vp09.00.41.08';
const sourceMov = document.createElement('source');
sourceMov.src = element.source_mov;
sourceMov.type = 'video/quicktime; codecs=hvc1.1.6.H120.b0';
video.appendChild(sourceMov);
video.appendChild(sourceWebM);
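// Load the poster image to measure natural dimensions for scaling; it is stored on the element as posterImage for the preview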
const posterImg = new Image();
posterImg.crossOrigin = 'anonymous';
posterImg.src = element.poster;
posterImg.onload = () => {
console.log('Poster loaded for:', element.id);
const maxWidth = dimensions.width;
const maxHeight = dimensions.height;
const posterWidth = posterImg.naturalWidth;
const posterHeight = posterImg.naturalHeight;
let scaledWidth = posterWidth;
let scaledHeight = posterHeight;
if (posterWidth > maxWidth || posterHeight > maxHeight) {
const scaleX = maxWidth / posterWidth;
const scaleY = maxHeight / posterHeight;
const scale = Math.min(scaleX, scaleY);
scaledWidth = posterWidth * scale;
scaledHeight = posterHeight * scale;
}
// ✅ UPDATED: Different positioning for background vs meme
let centeredX, centeredY;
if (element.id.startsWith('bg_')) {
// Background should fill the canvas
centeredX = 0;
centeredY = 0;
scaledWidth = maxWidth;
scaledHeight = maxHeight;
} else {
// Meme should be centered
centeredX = (maxWidth - scaledWidth) / 2;
centeredY = (maxHeight - scaledHeight) / 2;
}
setTimelineElements((prev) =>
prev.map((el) => {
if (el.id === element.id && el.type === 'video') {
return {
...el,
x: centeredX,
y: centeredY,
width: scaledWidth,
height: scaledHeight,
posterImage: posterImg,
isVideoPoster: true,
};
}
return el;
}),
);
setLoadedVideos((prev) => {
const newSet = new Set(prev);
newSet.add(element.id);
console.log('Video loaded:', element.id, 'Total loaded:', newSet.size);
return newSet;
});
};
video.addEventListener('loadedmetadata', () => {
console.log('Video metadata loaded for:', element.id);
setTimelineElements((prev) =>
prev.map((el) => {
if (el.id === element.id && el.type === 'video') {
return {
...el,
videoElement: video,
isVideoReady: true,
};
}
return el;
}),
);
});
video.addEventListener('error', (e) => {
console.error(`Error loading video ${element.id}:`, e);
});
posterImg.onerror = (e) => {
console.error(`Error loading poster ${element.id}:`, e);
};
videoEls[element.id] = video;
});
console.log('Setting video elements:', Object.keys(videoEls));
setVideoElements(videoEls);
};
// Rest of the component remains the same...
const cleanupVideos = (videosToCleanup) => {
if (!videosToCleanup) return;
const videoArray = Array.isArray(videosToCleanup) ? videosToCleanup : Object.values(videosToCleanup);
videoArray.forEach((video) => {
// Sources are attached as <source> children, so video.src is empty; guard on the element alone or cleanup never runs
if (video) {
if (!video.paused) video.pause();
// Detach the <source> children and reset the element so the browser releases its network/decoder resources
while (video.firstChild) video.removeChild(video.firstChild);
video.removeAttribute('src');
video.load();
}
});
};
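// Derive the status text from how many of the timeline's videos have finished loading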
const setupVideoStatus = () => {
const videoCount = timelineElements.filter((el) => el.type === 'video').length;
if (loadedVideos.size === videoCount && videoCount > 0) {
setStatus('Ready to play');
} else if (videoCount > 0) {
setStatus(`Loading videos... (${loadedVideos.size}/${videoCount})`);
} else {
setStatus('Ready to play');
}
};
// FIXED: currentTime is intentionally left out of the deps (it changes every frame), so read the last frame time from a ref to avoid a stale closure
const handlePause = useCallback(() => {
if (isPlaying) {
setIsPlaying(false);
pausedTimeRef.current = lastUpdateRef.current;
Object.values(videoElements).forEach((video) => {
if (!video.paused) {
video.pause();
}
video.muted = true;
});
setVideoStates({});
if (animationRef.current) {
animationRef.current.stop();
animationRef.current = null;
}
}
}, [isPlaying, videoElements]);
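// Returns the timeline elements whose [startTime, startTime + duration) window contains the given time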
const getActiveElements = useCallback(
(time) => {
return timelineElements.filter((element) => {
const elementEndTime = element.startTime + element.duration;
return time >= element.startTime && time < elementEndTime;
});
},
[timelineElements],
);
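// Returns a map of video element id -> whether that video should be playing at the given time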
const getDesiredVideoStates = useCallback(
(time) => {
const states = {};
timelineElements.forEach((element) => {
if (element.type === 'video') {
const elementEndTime = element.startTime + element.duration;
states[element.id] = time >= element.startTime && time < elementEndTime;
}
});
return states;
},
[timelineElements],
);
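// Nudge each active video's currentTime back onto the timeline position, correcting only drift greater than 0.5s to avoid constant seeking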
const updateVideoTimes = useCallback(
(time) => {
timelineElements.forEach((element) => {
if (element.type === 'video' && videoElements[element.id]) {
const video = videoElements[element.id];
const elementEndTime = element.startTime + element.duration;
if (time >= element.startTime && time < elementEndTime) {
const relativeTime = time - element.startTime;
const videoTime = element.inPoint + relativeTime;
if (Math.abs(video.currentTime - videoTime) > 0.5) {
video.currentTime = videoTime;
}
}
}
});
},
[timelineElements, videoElements],
);
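// While playing, start or stop each underlying <video> element so it matches the desired state at the current time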
useEffect(() => {
if (!isPlaying) return;
const desiredStates = getDesiredVideoStates(currentTime);
Object.entries(desiredStates).forEach(([videoId, shouldPlay]) => {
const video = videoElements[videoId];
const isCurrentlyPlaying = !video?.paused;
if (video) {
if (shouldPlay && !isCurrentlyPlaying) {
video.muted = false;
video.play().catch((e) => console.warn('Video play failed:', e));
} else if (!shouldPlay && isCurrentlyPlaying) {
video.pause();
video.muted = true;
}
}
});
setVideoStates(desiredStates);
}, [currentTime, isPlaying, videoElements, getDesiredVideoStates]);
// FIXED: Properly stop animation when not playing
useEffect(() => {
if (!isPlaying) {
if (animationRef.current) {
animationRef.current.stop();
animationRef.current = null;
}
return;
}
let intervalId;
let isRunning = true;
const animateFrame = () => {
if (!isRunning) return;
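// Timeline position = time accumulated before the last pause + wall-clock time elapsed since playback resumed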
const now = Date.now() / 1000;
const newTime = pausedTimeRef.current + (now - startTimeRef.current);
if (newTime >= totalDuration) {
handlePause();
handleSeek(0);
return;
}
lastUpdateRef.current = newTime;
setCurrentTime(newTime);
updateVideoTimes(newTime);
if (layerRef.current) {
layerRef.current.batchDraw();
}
};
startTimeRef.current = Date.now() / 1000;
intervalId = setInterval(animateFrame, FPS_INTERVAL);
animationRef.current = {
stop: () => {
isRunning = false;
if (intervalId) {
clearInterval(intervalId);
}
},
};
return () => {
isRunning = false;
if (intervalId) {
clearInterval(intervalId);
}
};
}, [isPlaying, totalDuration, handlePause, updateVideoTimes]);
// FIXED: Stabilized handlers
const handlePlay = useCallback(() => {
if (!isPlaying) {
setIsPlaying(true);
startTimeRef.current = Date.now() / 1000;
lastUpdateRef.current = pausedTimeRef.current; // keep the ref at the resume position so pausing before the first frame records the correct time
setStatus('');
}
}, [isPlaying]);
const handleSeek = useCallback(
(time) => {
const clampedTime = Math.max(0, Math.min(time, totalDuration));
setCurrentTime(clampedTime);
pausedTimeRef.current = clampedTime;
updateVideoTimes(clampedTime);
setVideoStates({});
if (layerRef.current) {
layerRef.current.draw();
}
},
[totalDuration, updateVideoTimes],
);
const handleReset = useCallback(() => {
handlePause();
handleSeek(0);
lastUpdateRef.current = 0;
Object.values(videoElements).forEach((video) => {
video.muted = true;
});
}, [handlePause, handleSeek, videoElements]);
const activeElements = getActiveElements(currentTime);
// FIXED: Added missing dependencies to event listeners
useEffect(() => {
emitter.on('video-play', handlePlay);
emitter.on('video-reset', handleReset);
emitter.on('video-seek', handleSeek);
return () => {
emitter.off('video-play', handlePlay);
emitter.off('video-reset', handleReset);
emitter.off('video-seek', handleSeek);
};
}, [emitter, handlePlay, handleReset, handleSeek]);
return (
<div style={{ width: dimensions.width, height: dimensions.height }} className="rounded-3xl">
<VideoPreview
dimensions={dimensions}
currentTime={currentTime}
totalDuration={totalDuration}
isPlaying={isPlaying}
status={status}
isExporting={isExporting}
exportProgress={exportProgress}
exportStatus={exportStatus}
timelineElements={timelineElements}
activeElements={activeElements}
videoElements={videoElements}
loadedVideos={loadedVideos}
videoStates={videoStates}
ffmpegCommand={ffmpegCommand}
handlePlay={handlePlay}
handlePause={handlePause}
handleReset={handleReset}
handleSeek={handleSeek}
copyFFmpegCommand={copyFFmpegCommand}
exportVideo={exportVideo}
layerRef={layerRef}
/>
</div>
);
};
export default VideoEditor;