// TODO: I moved the sample timeline data to a dedicated file and delayed its loading by 1 s with useEffect. As a result, a lot of the logic is broken. I need to make sure the delayed timeline works like normal.
import { useMitt } from '@/plugins/MittContext';
import useVideoEditorStore from '@/stores/VideoEditorStore';
import { useCallback, useEffect, useRef, useState } from 'react';
import sampleTimelineElements from './sample-timeline-data';
import useVideoExport from './video-export';
import VideoPreview from './video-preview';
const VideoEditor = ({ width, height }) => {
|
|
const [dimensions] = useState({
|
|
width: width,
|
|
height: height,
|
|
});
|
|
|
|
const [timelineElements, setTimelineElements] = useState([]);
|
|
|
|
const lastUpdateRef = useRef(0);
|
|
const emitter = useMitt();
|
|
|
|
const [currentTime, setCurrentTime] = useState(0);
|
|
const [isPlaying, setIsPlaying] = useState(false);
|
|
const [videoElements, setVideoElements] = useState({});
|
|
const [loadedVideos, setLoadedVideos] = useState(new Set());
|
|
const [status, setStatus] = useState('Loading videos...');
|
|
const [videoStates, setVideoStates] = useState({});
|
|
|
|
const animationRef = useRef(null);
|
|
const layerRef = useRef(null);
|
|
const startTimeRef = useRef(0);
|
|
const pausedTimeRef = useRef(0);
|
|
|
|
const { setVideoIsPlaying } = useVideoEditorStore();
|
|
|
|
useEffect(() => {
|
|
setTimeout(() => setTimelineElements(sampleTimelineElements), 1000);
|
|
}, []);
|
|
|
|
useEffect(() => {
|
|
setVideoIsPlaying(isPlaying);
|
|
}, [isPlaying, setVideoIsPlaying]);
|
|
|
|
const totalDuration = Math.max(...timelineElements.map((el) => el.startTime + el.duration));
|
|
|
|
// Use the FFmpeg hook
|
|
const { isExporting, exportProgress, exportStatus, ffmpegCommand, copyFFmpegCommand, exportVideo } = useVideoExport({
|
|
timelineElements,
|
|
dimensions,
|
|
totalDuration,
|
|
});
|
|
|
|
useEffect(() => {
|
|
const videoEls = {};
|
|
const videoElementsData = timelineElements.filter((el) => el.type === 'video');
|
|
|
|
videoElementsData.forEach((element) => {
|
|
const video = document.createElement('video');
|
|
video.crossOrigin = 'anonymous';
|
|
video.muted = true;
|
|
video.preload = 'metadata';
|
|
video.playsInline = true;
|
|
video.controls = false;
|
|
|
|
const sourceWebM = document.createElement('source');
|
|
sourceWebM.src = element.source_webm;
|
|
sourceWebM.type = 'video/webm; codecs=vp09.00.41.08';
|
|
|
|
const sourceMov = document.createElement('source');
|
|
sourceMov.src = element.source_mov;
|
|
sourceMov.type = 'video/quicktime; codecs=hvc1.1.6.H120.b0';
|
|
|
|
video.appendChild(sourceMov);
|
|
video.appendChild(sourceWebM);
|
|
|
|
const posterImg = new Image();
|
|
posterImg.crossOrigin = 'anonymous';
|
|
posterImg.src = element.poster;
|
|
|
|
posterImg.onload = () => {
|
|
const maxWidth = dimensions.width;
|
|
const maxHeight = dimensions.height;
|
|
const posterWidth = posterImg.naturalWidth;
|
|
const posterHeight = posterImg.naturalHeight;
|
|
|
|
let scaledWidth = posterWidth;
|
|
let scaledHeight = posterHeight;
|
|
|
|
if (posterWidth > maxWidth || posterHeight > maxHeight) {
|
|
const scaleX = maxWidth / posterWidth;
|
|
const scaleY = maxHeight / posterHeight;
|
|
const scale = Math.min(scaleX, scaleY);
|
|
|
|
scaledWidth = posterWidth * scale;
|
|
scaledHeight = posterHeight * scale;
|
|
}
|
|
|
|
const centeredX = (maxWidth - scaledWidth) / 2;
|
|
const centeredY = (maxHeight - scaledHeight) / 2;
|
|
|
|
setTimelineElements((prev) =>
|
|
prev.map((el) => {
|
|
if (el.id === element.id && el.type === 'video') {
|
|
return {
|
|
...el,
|
|
x: centeredX,
|
|
y: centeredY,
|
|
width: scaledWidth,
|
|
height: scaledHeight,
|
|
posterImage: posterImg,
|
|
isVideoPoster: true,
|
|
};
|
|
}
|
|
return el;
|
|
}),
|
|
);
|
|
|
|
setLoadedVideos((prev) => {
|
|
const newSet = new Set(prev);
|
|
newSet.add(element.id);
|
|
return newSet;
|
|
});
|
|
};
|
|
|
|
video.addEventListener('loadedmetadata', () => {
|
|
setTimelineElements((prev) =>
|
|
prev.map((el) => {
|
|
if (el.id === element.id && el.type === 'video') {
|
|
return {
|
|
...el,
|
|
videoElement: video,
|
|
isVideoReady: true,
|
|
};
|
|
}
|
|
return el;
|
|
}),
|
|
);
|
|
});
|
|
|
|
video.addEventListener('error', (e) => {
|
|
console.error(`Error loading video ${element.id}:`, e);
|
|
});
|
|
|
|
posterImg.onerror = (e) => {
|
|
console.error(`Error loading poster ${element.id}:`, e);
|
|
};
|
|
|
|
videoEls[element.id] = video;
|
|
});
|
|
|
|
setVideoElements(videoEls);
|
|
|
|
return () => {
|
|
Object.values(videoEls).forEach((video) => {
|
|
video.src = '';
|
|
video.load();
|
|
});
|
|
};
|
|
}, []);
|
|
|
|
useEffect(() => {
|
|
const videoCount = timelineElements.filter((el) => el.type === 'video').length;
|
|
if (loadedVideos.size === videoCount && videoCount > 0) {
|
|
setStatus('Ready to play');
|
|
} else if (videoCount > 0) {
|
|
setStatus(`Loading videos... (${loadedVideos.size}/${videoCount})`);
|
|
} else {
|
|
setStatus('Ready to play');
|
|
}
|
|
}, [loadedVideos, timelineElements]);
|
|
|
|
// FIXED: Removed currentTime dependency to prevent excessive recreation
|
|
const handlePause = useCallback(() => {
|
|
if (isPlaying) {
|
|
setIsPlaying(false);
|
|
pausedTimeRef.current = currentTime;
|
|
|
|
Object.values(videoElements).forEach((video) => {
|
|
if (!video.paused) {
|
|
video.pause();
|
|
}
|
|
video.muted = true;
|
|
});
|
|
|
|
setVideoStates({});
|
|
|
|
if (animationRef.current) {
|
|
animationRef.current.stop();
|
|
animationRef.current = null;
|
|
}
|
|
}
|
|
}, [isPlaying, videoElements]);
|
|
|
|
const getActiveElements = useCallback(
|
|
(time) => {
|
|
return timelineElements.filter((element) => {
|
|
const elementEndTime = element.startTime + element.duration;
|
|
return time >= element.startTime && time < elementEndTime;
|
|
});
|
|
},
|
|
[timelineElements],
|
|
);
|
|
|
|
const getDesiredVideoStates = useCallback(
|
|
(time) => {
|
|
const states = {};
|
|
timelineElements.forEach((element) => {
|
|
if (element.type === 'video') {
|
|
const elementEndTime = element.startTime + element.duration;
|
|
states[element.id] = time >= element.startTime && time < elementEndTime;
|
|
}
|
|
});
|
|
return states;
|
|
},
|
|
[timelineElements],
|
|
);
|
|
|
|
const updateVideoTimes = useCallback(
|
|
(time) => {
|
|
timelineElements.forEach((element) => {
|
|
if (element.type === 'video' && videoElements[element.id]) {
|
|
const video = videoElements[element.id];
|
|
const elementEndTime = element.startTime + element.duration;
|
|
|
|
if (time >= element.startTime && time < elementEndTime) {
|
|
const relativeTime = time - element.startTime;
|
|
const videoTime = element.inPoint + relativeTime;
|
|
|
|
if (Math.abs(video.currentTime - videoTime) > 0.5) {
|
|
video.currentTime = videoTime;
|
|
}
|
|
}
|
|
}
|
|
});
|
|
},
|
|
[timelineElements, videoElements],
|
|
);
|
|
|
|
useEffect(() => {
|
|
if (!isPlaying) return;
|
|
|
|
const desiredStates = getDesiredVideoStates(currentTime);
|
|
|
|
Object.entries(desiredStates).forEach(([videoId, shouldPlay]) => {
|
|
const video = videoElements[videoId];
|
|
const isCurrentlyPlaying = !video?.paused;
|
|
|
|
if (video) {
|
|
if (shouldPlay && !isCurrentlyPlaying) {
|
|
video.muted = false;
|
|
video.play().catch((e) => console.warn('Video play failed:', e));
|
|
} else if (!shouldPlay && isCurrentlyPlaying) {
|
|
video.pause();
|
|
video.muted = true;
|
|
}
|
|
}
|
|
});
|
|
|
|
setVideoStates(desiredStates);
|
|
}, [currentTime, isPlaying, videoElements, getDesiredVideoStates]);
|
|
|
|
// FIXED: Properly stop animation when not playing
|
|
useEffect(() => {
|
|
if (!isPlaying) {
|
|
if (animationRef.current) {
|
|
animationRef.current.stop();
|
|
animationRef.current = null;
|
|
}
|
|
return;
|
|
}
|
|
|
|
let animationId;
|
|
let isRunning = true;
|
|
|
|
const animateFrame = () => {
|
|
if (!isRunning) return;
|
|
|
|
const now = Date.now() / 1000;
|
|
const newTime = pausedTimeRef.current + (now - startTimeRef.current);
|
|
|
|
if (newTime >= totalDuration) {
|
|
handlePause();
|
|
handleSeek(0);
|
|
return;
|
|
}
|
|
|
|
if (newTime - lastUpdateRef.current >= 0.05) {
|
|
lastUpdateRef.current = newTime;
|
|
setCurrentTime(newTime);
|
|
updateVideoTimes(newTime);
|
|
|
|
if (layerRef.current) {
|
|
layerRef.current.batchDraw();
|
|
}
|
|
}
|
|
|
|
if (isRunning) {
|
|
animationId = requestAnimationFrame(animateFrame);
|
|
}
|
|
};
|
|
|
|
startTimeRef.current = Date.now() / 1000;
|
|
animationId = requestAnimationFrame(animateFrame);
|
|
|
|
animationRef.current = {
|
|
stop: () => {
|
|
isRunning = false;
|
|
if (animationId) {
|
|
cancelAnimationFrame(animationId);
|
|
}
|
|
},
|
|
};
|
|
|
|
return () => {
|
|
isRunning = false;
|
|
if (animationId) {
|
|
cancelAnimationFrame(animationId);
|
|
}
|
|
};
|
|
}, [isPlaying, totalDuration, handlePause, updateVideoTimes]);
|
|
|
|
// FIXED: Stabilized handlers
|
|
const handlePlay = useCallback(() => {
|
|
if (!isPlaying) {
|
|
setIsPlaying(true);
|
|
startTimeRef.current = Date.now() / 1000;
|
|
lastUpdateRef.current = 0;
|
|
setStatus('');
|
|
}
|
|
}, [isPlaying]);
|
|
|
|
const handleSeek = useCallback(
|
|
(time) => {
|
|
const clampedTime = Math.max(0, Math.min(time, totalDuration));
|
|
setCurrentTime(clampedTime);
|
|
pausedTimeRef.current = clampedTime;
|
|
updateVideoTimes(clampedTime);
|
|
|
|
setVideoStates({});
|
|
|
|
if (layerRef.current) {
|
|
layerRef.current.draw();
|
|
}
|
|
},
|
|
[totalDuration, updateVideoTimes],
|
|
);
|
|
|
|
const handleReset = useCallback(() => {
|
|
handlePause();
|
|
handleSeek(0);
|
|
lastUpdateRef.current = 0;
|
|
|
|
Object.values(videoElements).forEach((video) => {
|
|
video.muted = true;
|
|
});
|
|
}, [handlePause, handleSeek, videoElements]);
|
|
|
|
const activeElements = getActiveElements(currentTime);
|
|
|
|
// FIXED: Added missing dependencies to event listeners
|
|
useEffect(() => {
|
|
emitter.on('video-play', handlePlay);
|
|
emitter.on('video-reset', handleReset);
|
|
emitter.on('video-seek', handleSeek);
|
|
|
|
return () => {
|
|
emitter.off('video-play', handlePlay);
|
|
emitter.off('video-reset', handleReset);
|
|
emitter.off('video-seek', handleSeek);
|
|
};
|
|
}, [emitter, handlePlay, handleReset, handleSeek]);
|
|
|
|
return (
|
|
<div style={{ width: dimensions.width, height: dimensions.height }} className="rounded-3xl">
|
|
<VideoPreview
|
|
dimensions={dimensions}
|
|
currentTime={currentTime}
|
|
totalDuration={totalDuration}
|
|
isPlaying={isPlaying}
|
|
status={status}
|
|
isExporting={isExporting}
|
|
exportProgress={exportProgress}
|
|
exportStatus={exportStatus}
|
|
timelineElements={timelineElements}
|
|
activeElements={activeElements}
|
|
videoElements={videoElements}
|
|
loadedVideos={loadedVideos}
|
|
videoStates={videoStates}
|
|
ffmpegCommand={ffmpegCommand}
|
|
handlePlay={handlePlay}
|
|
handlePause={handlePause}
|
|
handleReset={handleReset}
|
|
handleSeek={handleSeek}
|
|
copyFFmpegCommand={copyFFmpegCommand}
|
|
exportVideo={exportVideo}
|
|
layerRef={layerRef}
|
|
/>
|
|
</div>
|
|
);
|
|
};
export default VideoEditor;