679 lines
24 KiB
JavaScript
679 lines
24 KiB
JavaScript
import { useMitt } from '@/plugins/MittContext';
|
|
import useVideoEditorStore from '@/stores/VideoEditorStore';
|
|
import { FFmpeg } from '@ffmpeg/ffmpeg';
|
|
import { fetchFile, toBlobURL } from '@ffmpeg/util';
|
|
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
|
import VideoPreview from './video-preview';
|
|
|
|
const VideoEditor = ({ width, height }) => {
|
|
// Canvas size is fixed from the initial props for the component's lifetime.
const [dimensions] = useState({ width, height });
|
|
|
|
// Seed timeline.
// Video elements: startTime/duration are timeline seconds; inPoint is the
// offset into the source clip (becomes ffmpeg trim=start); layer is stacking
// order; x/y/width/height position the clip on the canvas (the poster onload
// handler below re-fits and re-centers these once the poster size is known).
// Text elements: drawtext captions with fill/stroke styling.
const [timelineElements, setTimelineElements] = useState([
  {
    id: '1',
    type: 'video',
    source_webm: 'https://cdn.memeaigen.com/g1/webm/they-not-like-us-oiia-cat-version.webm',
    source_mov: 'https://cdn.memeaigen.com/g1/mov/they-not-like-us-oiia-cat-version.mov',
    poster: 'https://cdn.memeaigen.com/g1/webp/they-not-like-us-oiia-cat-version.webp',
    name: 'They not like us cat',
    startTime: 0,
    layer: 0,
    inPoint: 0,
    duration: 5,
    x: 50,
    y: 50,
    width: 300,
    height: 200,
  },
  {
    id: '2',
    type: 'video',
    source_webm: 'https://cdn.memeaigen.com/g1/webm/sad-cat.webm',
    source_mov: 'https://cdn.memeaigen.com/g1/mov/sad-cat.mov',
    poster: 'https://cdn.memeaigen.com/g1/webp/sad-cat.webp',
    name: 'Sad cat meme',
    startTime: 6,
    layer: 0,
    inPoint: 2,
    duration: 4,
    x: 100,
    y: 100,
    width: 250,
    height: 150,
  },
  {
    id: '3',
    type: 'video',
    source_webm: 'https://cdn.memeaigen.com/g1/webm/este-cat-dance.webm',
    source_mov: 'https://cdn.memeaigen.com/g1/mov/este-cat-dance.mov',
    poster: 'https://cdn.memeaigen.com/g1/webp/este-cat-dance.webp',
    name: 'Este cat dance',
    startTime: 2,
    layer: 1,
    inPoint: 1,
    duration: 6,
    x: 200,
    y: 200,
    width: 280,
    height: 180,
  },
  {
    id: '4',
    type: 'text',
    text: 'Welcome to the Timeline!',
    startTime: 1,
    layer: 2,
    duration: 3,
    x: 50,
    y: 600,
    fontSize: 24,
    fill: 'white',
    stroke: 'black',
    strokeWidth: 1,
  },
  {
    id: '5',
    type: 'text',
    text: 'Multiple videos playing!',
    startTime: 3,
    layer: 3,
    duration: 4,
    x: 50,
    y: 650,
    fontSize: 20,
    fill: 'yellow',
    stroke: 'red',
    strokeWidth: 2,
  },
]);
|
|
|
|
// Last timeline position (seconds) pushed into React state — used to
// throttle setCurrentTime inside the rAF loop.
const lastUpdateRef = useRef(0);
// NOTE(review): this FFmpeg instance appears unused — exportVideo creates
// its own instance; confirm before removing.
const ffmpegRef = useRef(new FFmpeg());
// App-wide mitt event bus; transport events are bound in an effect below.
const emitter = useMitt();

// Export pipeline UI state.
const [isExporting, setIsExporting] = useState(false);
const [exportProgress, setExportProgress] = useState(0);
const [exportStatus, setExportStatus] = useState('');

// Playback state.
const [currentTime, setCurrentTime] = useState(0);
const [isPlaying, setIsPlaying] = useState(false);
// Map of element id -> detached <video> element used as a frame source.
const [videoElements, setVideoElements] = useState({});
// Ids of elements whose poster image finished loading.
const [loadedVideos, setLoadedVideos] = useState(new Set());
const [status, setStatus] = useState('Loading videos...');
// Map of element id -> whether its <video> should currently be playing.
const [videoStates, setVideoStates] = useState({});

// Handle exposing stop() for the running rAF loop (null when idle).
const animationRef = useRef(null);
// Layer handle — the batchDraw()/draw() calls suggest a Konva layer; confirm.
const layerRef = useRef(null);
// Wall-clock second at which the current play run started.
const startTimeRef = useRef(0);
// Timeline position (seconds) at the moment of the last pause/seek.
const pausedTimeRef = useRef(0);

const { setVideoIsPlaying } = useVideoEditorStore();

// Mirror local play state into the shared editor store.
useEffect(() => {
  setVideoIsPlaying(isPlaying);
}, [isPlaying, setVideoIsPlaying]);

// End time of the last-ending element. NOTE(review): evaluates to -Infinity
// if the timeline is ever emptied — confirm at least one element is
// guaranteed upstream.
const totalDuration = Math.max(...timelineElements.map((el) => el.startTime + el.duration));
|
|
|
|
// Builds the ffmpeg invocation that flattens the timeline into output.mp4:
// clips trimmed/scaled and overlaid on a black base, per-clip audio trimmed,
// delayed and mixed, and drawtext captions chained on top.
//
// @param {boolean} is_string     true  -> copy/pasteable CLI string;
//                                false -> argv array for ffmpeg.wasm exec().
// @param {boolean} useLocalFiles true  -> reference the input{i}.webm files
//                                written into the wasm FS during export,
//                                instead of the remote webm URLs.
const generateFFmpegCommand = useCallback(
  (is_string = true, useLocalFiles = false) => {
    console.log('🎬 STARTING FFmpeg generation');

    const videos = timelineElements.filter((el) => el.type === 'video');
    const texts = timelineElements.filter((el) => el.type === 'text');

    console.log('Videos found:', videos.length);

    if (videos.length === 0) {
      // Degenerate timeline: emit one second of black so the command is
      // always runnable.
      if (is_string) {
        return 'ffmpeg -f lavfi -i color=black:size=450x800:duration=1 -c:v libx264 -t 1 output.mp4';
      } else {
        return ['-f', 'lavfi', '-i', 'color=black:size=450x800:duration=1', '-c:v', 'libx264', '-t', '1', 'output.mp4'];
      }
    }

    // One -i per clip.
    // FIX: timeline elements carry source_webm/source_mov — there is no
    // `source` field, so the previous `v.source` produced "undefined" inputs.
    let inputArgs = [];
    videos.forEach((v, i) => {
      inputArgs.push('-i');
      inputArgs.push(useLocalFiles ? `input${i}.webm` : v.source_webm);
    });

    // Filtergraph base: a black canvas sized to the preview dimensions.
    let filters = [];
    filters.push(`color=black:size=${dimensions.width}x${dimensions.height}:duration=${totalDuration}[base]`);

    // Chain one trim -> scale -> overlay stage per clip, each enabled only
    // inside its [startTime, startTime + duration] window.
    let videoLayer = 'base';
    videos.forEach((v, i) => {
      filters.push(`[${i}:v]trim=start=${v.inPoint}:duration=${v.duration},setpts=PTS-STARTPTS[v${i}_trim]`);
      filters.push(`[v${i}_trim]scale=${Math.round(v.width)}:${Math.round(v.height)}[v${i}_scale]`);
      filters.push(
        `[${videoLayer}][v${i}_scale]overlay=${Math.round(v.x)}:${Math.round(v.y)}:enable='between(t,${v.startTime},${
          v.startTime + v.duration
        })'[v${i}_out]`,
      );
      videoLayer = `v${i}_out`;
    });

    console.log('🎵 PROCESSING AUDIO FOR', videos.length, 'VIDEOS');

    // Audio: trim each clip, realign timestamps, then delay it to its
    // timeline start (adelay takes milliseconds per channel).
    let audioOutputs = [];
    videos.forEach((v, i) => {
      const delay = Math.round(v.startTime * 1000);
      console.log(`🎵 Audio ${i}: delay=${delay}ms, inPoint=${v.inPoint}, duration=${v.duration}`);
      filters.push(`[${i}:a]atrim=start=${v.inPoint}:duration=${v.duration},asetpts=PTS-STARTPTS,adelay=${delay}|${delay}[a${i}]`);
      audioOutputs.push(`[a${i}]`);
    });

    let audioArgs = [];
    if (audioOutputs.length === 1) {
      // Single track: pad with silence to the full timeline length.
      filters.push(`[a0]apad=pad_dur=${totalDuration}[audio_final]`);
      audioArgs = ['-map', '[audio_final]', '-c:a', 'aac'];
    } else if (audioOutputs.length > 1) {
      filters.push(`${audioOutputs.join('')}amix=inputs=${audioOutputs.length}:duration=longest[audio_final]`);
      audioArgs = ['-map', '[audio_final]', '-c:a', 'aac'];
    }

    console.log('🎵 Audio args:', audioArgs);

    // Captions: one drawtext per text element, chained after the last overlay.
    texts.forEach((t, i) => {
      // Quotes only need escaping in the shell-string form; colons always do.
      const escapedText = t.text.replace(/'/g, is_string ? "\\'" : "'").replace(/:/g, '\\:');

      filters.push(
        `[${videoLayer}]drawtext=fontfile=/arial.ttf:text='${escapedText}':x=${Math.round(
          t.x,
        )}:y=${Math.round(t.y)}:fontsize=${t.fontSize}:fontcolor=${t.fill}:borderw=${t.strokeWidth}:bordercolor=${
          t.stroke
        }:enable='between(t,${t.startTime},${t.startTime + t.duration})'[t${i}]`,
      );
      videoLayer = `t${i}`;
    });

    const filterComplex = filters.join('; ');
    console.log('🎵 Filter includes atrim:', filterComplex.includes('atrim'));

    const finalArgs = [
      ...inputArgs,
      '-filter_complex',
      filterComplex,
      '-map',
      `[${videoLayer}]`,
      ...audioArgs,
      '-c:v',
      'libx264',
      '-pix_fmt',
      'yuv420p',
      '-r',
      '30',
      '-t',
      totalDuration.toString(),
      'output.mp4',
    ];

    if (is_string) {
      const inputs = videos.map((v, i) => `-i "${useLocalFiles ? `input${i}.webm` : v.source_webm}"`).join(' ');
      const audioMap = audioArgs.length > 0 ? ` ${audioArgs.join(' ')}` : '';
      const command = `ffmpeg ${inputs} -filter_complex "${filterComplex}" -map "[${videoLayer}]"${audioMap} -c:v libx264 -pix_fmt yuv420p -r 30 -t ${totalDuration} output.mp4`;

      console.log('🎵 FINAL COMMAND HAS AUDIO:', command.includes('atrim') && command.includes('audio_final'));

      return command;
    } else {
      // FIX: Array.prototype.includes matches whole elements, not substrings,
      // so the old `finalArgs.includes('atrim')` was always false; inspect
      // the filtergraph string instead.
      console.log('🎵 FINAL ARGS HAVE AUDIO:', filterComplex.includes('atrim') && filterComplex.includes('audio_final'));

      return finalArgs;
    }
  },
  [timelineElements, dimensions, totalDuration],
);
|
|
|
|
// Human-readable CLI form of the current timeline, for display and copying.
const ffmpegCommand = useMemo(() => generateFFmpegCommand(true, false), [generateFFmpegCommand]);
|
|
|
|
// Logs the generated command and copies it to the clipboard.
// FIX: writeText returns a Promise; handle rejection (e.g. missing clipboard
// permission or an insecure context) instead of leaving a floating promise.
const copyFFmpegCommand = useCallback(() => {
  console.log('🎬 FFMPEG COMMAND GENERATED:');
  console.log('Command:', ffmpegCommand);
  navigator.clipboard.writeText(ffmpegCommand).catch((err) => {
    console.error('Failed to copy FFmpeg command:', err);
  });
}, [ffmpegCommand]);
|
|
|
|
// Mount-only setup: create a detached <video> element per video timeline
// element and preload its poster. The poster's natural size drives the
// initial fit/center of the element on the canvas.
// NOTE(review): deps are [] although timelineElements/dimensions are read —
// presumably intentional so the seed timeline is only processed once; confirm.
useEffect(() => {
  const videoEls = {};
  const videoElementsData = timelineElements.filter((el) => el.type === 'video');

  videoElementsData.forEach((element) => {
    const video = document.createElement('video');
    video.crossOrigin = 'anonymous';
    video.muted = true;
    video.preload = 'metadata';
    video.playsInline = true;
    video.controls = false;

    const sourceWebM = document.createElement('source');
    sourceWebM.src = element.source_webm;
    sourceWebM.type = 'video/webm; codecs=vp09.00.41.08';

    const sourceMov = document.createElement('source');
    sourceMov.src = element.source_mov;
    sourceMov.type = 'video/quicktime; codecs=hvc1.1.6.H120.b0';

    // The browser uses the first supported <source>; MOV is listed first.
    video.appendChild(sourceMov);
    video.appendChild(sourceWebM);

    const posterImg = new Image();
    posterImg.crossOrigin = 'anonymous';
    posterImg.src = element.poster;

    posterImg.onload = () => {
      // Fit the poster inside the canvas while preserving aspect ratio.
      const maxWidth = dimensions.width;
      const maxHeight = dimensions.height;
      const posterWidth = posterImg.naturalWidth;
      const posterHeight = posterImg.naturalHeight;

      let scaledWidth = posterWidth;
      let scaledHeight = posterHeight;

      if (posterWidth > maxWidth || posterHeight > maxHeight) {
        const scaleX = maxWidth / posterWidth;
        const scaleY = maxHeight / posterHeight;
        const scale = Math.min(scaleX, scaleY);

        scaledWidth = posterWidth * scale;
        scaledHeight = posterHeight * scale;
      }

      const centeredX = (maxWidth - scaledWidth) / 2;
      const centeredY = (maxHeight - scaledHeight) / 2;

      // Replace the element's placeholder geometry with the fitted,
      // centered poster box and attach the poster image itself.
      setTimelineElements((prev) =>
        prev.map((el) => {
          if (el.id === element.id && el.type === 'video') {
            return {
              ...el,
              x: centeredX,
              y: centeredY,
              width: scaledWidth,
              height: scaledHeight,
              posterImage: posterImg,
              isVideoPoster: true,
            };
          }
          return el;
        }),
      );

      // Poster load is what marks the element "loaded" for the status line.
      setLoadedVideos((prev) => {
        const newSet = new Set(prev);
        newSet.add(element.id);
        return newSet;
      });
    };

    video.addEventListener('loadedmetadata', () => {
      // Once metadata is available, expose the playable element on the
      // timeline entry for the renderer.
      setTimelineElements((prev) =>
        prev.map((el) => {
          if (el.id === element.id && el.type === 'video') {
            return {
              ...el,
              videoElement: video,
              isVideoReady: true,
            };
          }
          return el;
        }),
      );
    });

    video.addEventListener('error', (e) => {
      console.error(`Error loading video ${element.id}:`, e);
    });

    posterImg.onerror = (e) => {
      console.error(`Error loading poster ${element.id}:`, e);
    };

    videoEls[element.id] = video;
  });

  setVideoElements(videoEls);

  // Cleanup: clear sources and reload to release decoder/network resources.
  return () => {
    Object.values(videoEls).forEach((video) => {
      video.src = '';
      video.load();
    });
  };
}, []);
|
|
|
|
// Keep the status line in sync with how many source videos have loaded.
useEffect(() => {
  const total = timelineElements.filter((el) => el.type === 'video').length;
  const ready = total === 0 || loadedVideos.size === total;
  setStatus(ready ? 'Ready to play' : `Loading videos... (${loadedVideos.size}/${total})`);
}, [loadedVideos, timelineElements]);
|
|
|
|
// Pause playback and freeze the playhead. Intentionally NOT recreated on
// every timeline tick (no currentTime dependency).
// FIX: the previous `pausedTimeRef.current = currentTime` read a stale
// closure value — with deps [isPlaying, videoElements], the captured
// currentTime was the value from when playback started, so resuming jumped
// backwards. The playhead is now derived from the same ref/wall-clock math
// the animation loop uses, which is always fresh.
const handlePause = useCallback(() => {
  if (isPlaying) {
    setIsPlaying(false);
    // position-at-last-pause + elapsed wall-clock = current playhead.
    pausedTimeRef.current += Date.now() / 1000 - startTimeRef.current;

    Object.values(videoElements).forEach((video) => {
      if (!video.paused) {
        video.pause();
      }
      video.muted = true;
    });

    setVideoStates({});

    if (animationRef.current) {
      animationRef.current.stop();
      animationRef.current = null;
    }
  }
}, [isPlaying, videoElements]);
|
|
|
|
// Renders the timeline to output.mp4 entirely client-side with ffmpeg.wasm
// and triggers a browser download. Progress/status are mirrored into the
// export UI state throughout.
const exportVideo = async () => {
  setIsExporting(true);
  setExportProgress(0);
  setExportStatus('Starting export...');

  let ffmpeg = null;
  try {
    setExportStatus('Loading FFmpeg...');

    ffmpeg = new FFmpeg();

    ffmpeg.on('progress', ({ progress }) => {
      setExportProgress(Math.round(progress * 100));
    });

    ffmpeg.on('log', ({ message }) => {
      console.log(message);
    });

    // The core JS/WASM must be fetched as blob: URLs so the worker can load
    // them despite the CDN being cross-origin.
    const baseURL = 'https://unpkg.com/@ffmpeg/core@0.12.6/dist/esm';
    const coreBlobURL = await toBlobURL(`${baseURL}/ffmpeg-core.js`, 'text/javascript');
    const wasmBlobURL = await toBlobURL(`${baseURL}/ffmpeg-core.wasm`, 'application/wasm');

    await ffmpeg.load({
      coreURL: coreBlobURL,
      wasmURL: wasmBlobURL,
    });
    console.log('FFmpeg loaded!');
    setExportProgress(20);

    // drawtext needs a real font file inside the wasm FS.
    setExportStatus('Loading font...');
    await ffmpeg.writeFile('arial.ttf', await fetchFile('https://raw.githubusercontent.com/ffmpegwasm/testdata/master/arial.ttf'));
    setExportProgress(30);

    setExportStatus('Downloading videos...');
    const videos = timelineElements.filter((el) => el.type === 'video');

    for (let i = 0; i < videos.length; i++) {
      // FIX: elements have no `source` field; fetch the webm variant, which
      // matches the input{i}.webm names the generated command references.
      await ffmpeg.writeFile(`input${i}.webm`, await fetchFile(videos[i].source_webm));
      setExportProgress(30 + Math.round(((i + 1) / videos.length) * 30));
    }

    setExportStatus('Processing video...');
    const args = generateFFmpegCommand(false, true);

    setExportProgress(70);
    await ffmpeg.exec(args);

    setExportStatus('Downloading...');
    setExportProgress(90);

    const fileData = await ffmpeg.readFile('output.mp4');
    const data = new Uint8Array(fileData);

    const blob = new Blob([data.buffer], { type: 'video/mp4' });
    const url = URL.createObjectURL(blob);

    const link = document.createElement('a');
    link.href = url;
    link.download = 'exported_video.mp4';
    link.click();
    URL.revokeObjectURL(url);

    setExportProgress(100);
    setExportStatus('Complete!');
  } catch (error) {
    console.error('Export error:', error);
    setExportStatus(`Failed: ${error.message}`);
  } finally {
    // FIX: terminate on failure too — previously the wasm worker was only
    // terminated on the success path and leaked whenever load/fetch/exec
    // threw.
    ffmpeg?.terminate();
    // Let the final status linger briefly before resetting the export UI.
    setTimeout(() => {
      setIsExporting(false);
      setExportStatus('');
      setExportProgress(0);
    }, 3000);
  }
};
|
|
|
|
// Elements whose [startTime, startTime + duration) window contains `time`.
const getActiveElements = useCallback(
  (time) =>
    timelineElements.filter(({ startTime, duration }) => time >= startTime && time < startTime + duration),
  [timelineElements],
);
|
|
|
|
// For each video element: should its <video> be playing at `time`?
// Returns a map of element id -> boolean.
const getDesiredVideoStates = useCallback(
  (time) => {
    const desired = {};
    for (const el of timelineElements) {
      if (el.type !== 'video') continue;
      desired[el.id] = time >= el.startTime && time < el.startTime + el.duration;
    }
    return desired;
  },
  [timelineElements],
);
|
|
|
|
// Re-sync each active clip's <video> currentTime to the timeline playhead.
const updateVideoTimes = useCallback(
  (time) => {
    for (const el of timelineElements) {
      if (el.type !== 'video') continue;
      const video = videoElements[el.id];
      if (!video) continue;

      const active = time >= el.startTime && time < el.startTime + el.duration;
      if (!active) continue;

      // Map timeline time into source-clip time via the element's inPoint.
      const target = el.inPoint + (time - el.startTime);

      // Only nudge when drift exceeds half a second to avoid seek thrash.
      if (Math.abs(video.currentTime - target) > 0.5) {
        video.currentTime = target;
      }
    }
  },
  [timelineElements, videoElements],
);
|
|
|
|
// While playing, start/stop and mute/unmute the raw <video> elements so
// only timeline-active clips are audible and advancing.
useEffect(() => {
  if (!isPlaying) return;

  const desired = getDesiredVideoStates(currentTime);

  for (const [videoId, shouldPlay] of Object.entries(desired)) {
    const video = videoElements[videoId];
    if (!video) continue;

    const currentlyPlaying = !video.paused;
    if (shouldPlay && !currentlyPlaying) {
      video.muted = false;
      video.play().catch((e) => console.warn('Video play failed:', e));
    } else if (!shouldPlay && currentlyPlaying) {
      video.pause();
      video.muted = true;
    }
  }

  setVideoStates(desired);
}, [currentTime, isPlaying, videoElements, getDesiredVideoStates]);
|
|
|
|
// Drives the playhead with requestAnimationFrame while isPlaying.
// FIXED: Properly stop animation when not playing
useEffect(() => {
  if (!isPlaying) {
    // Tear down any loop left over from the previous play run.
    if (animationRef.current) {
      animationRef.current.stop();
      animationRef.current = null;
    }
    return;
  }

  let animationId;
  let isRunning = true;

  const animateFrame = () => {
    if (!isRunning) return;

    // Playhead = position at last pause + wall-clock elapsed since play.
    const now = Date.now() / 1000;
    const newTime = pausedTimeRef.current + (now - startTimeRef.current);

    if (newTime >= totalDuration) {
      // Reached the end of the timeline: stop and rewind to zero.
      // NOTE(review): handleSeek is used here but not listed in the deps
      // below; its own deps (totalDuration, updateVideoTimes) ARE listed,
      // so this closure re-forms whenever handleSeek would change —
      // confirm that invariant is preserved when refactoring.
      handlePause();
      handleSeek(0);
      return;
    }

    // Throttle React state updates to ~20 Hz; rAF keeps running regardless.
    if (newTime - lastUpdateRef.current >= 0.05) {
      lastUpdateRef.current = newTime;
      setCurrentTime(newTime);
      updateVideoTimes(newTime);

      if (layerRef.current) {
        layerRef.current.batchDraw();
      }
    }

    if (isRunning) {
      animationId = requestAnimationFrame(animateFrame);
    }
  };

  startTimeRef.current = Date.now() / 1000;
  animationId = requestAnimationFrame(animateFrame);

  // Expose a stop handle so handlePause can cancel the loop mid-run.
  animationRef.current = {
    stop: () => {
      isRunning = false;
      if (animationId) {
        cancelAnimationFrame(animationId);
      }
    },
  };

  // Effect cleanup mirrors stop() for unmount / dependency changes.
  return () => {
    isRunning = false;
    if (animationId) {
      cancelAnimationFrame(animationId);
    }
  };
}, [isPlaying, totalDuration, handlePause, updateVideoTimes]);
|
|
|
|
// Start (or resume) playback; the rAF effect reacts to isPlaying flipping.
const handlePlay = useCallback(() => {
  if (isPlaying) return;
  setIsPlaying(true);
  startTimeRef.current = Date.now() / 1000;
  lastUpdateRef.current = 0;
  setStatus('');
}, [isPlaying]);
|
|
|
|
// Jump the playhead to `time`, clamped into [0, totalDuration], and resync
// every clip's <video> element to the new position.
const handleSeek = useCallback(
  (time) => {
    const clamped = Math.max(0, Math.min(time, totalDuration));

    setCurrentTime(clamped);
    pausedTimeRef.current = clamped;
    updateVideoTimes(clamped);

    // Drop the cached play-state map so the sync effect recomputes it.
    setVideoStates({});

    layerRef.current?.draw();
  },
  [totalDuration, updateVideoTimes],
);
|
|
|
|
// Stop playback, rewind to t=0, and silence every media element.
const handleReset = useCallback(() => {
  handlePause();
  handleSeek(0);
  lastUpdateRef.current = 0;

  for (const video of Object.values(videoElements)) {
    video.muted = true;
  }
}, [handlePause, handleSeek, videoElements]);
|
|
|
|
// Elements under the playhead for this render pass (cheap filter).
const activeElements = getActiveElements(currentTime);
|
|
|
|
// Wire transport events from the app-wide mitt bus to the local handlers;
// unsubscribe the exact same pairs on cleanup.
useEffect(() => {
  const bindings = [
    ['video-play', handlePlay],
    ['video-reset', handleReset],
    ['video-seek', handleSeek],
  ];

  bindings.forEach(([event, handler]) => emitter.on(event, handler));

  return () => {
    bindings.forEach(([event, handler]) => emitter.off(event, handler));
  };
}, [emitter, handlePlay, handleReset, handleSeek]);
|
|
|
|
// Render: all canvas/controls UI lives in VideoPreview; this component only
// supplies state and handlers.
return (
  <div style={{ width: dimensions.width, height: dimensions.height }} className="rounded-3xl">
    <VideoPreview
      dimensions={dimensions}
      currentTime={currentTime}
      totalDuration={totalDuration}
      isPlaying={isPlaying}
      status={status}
      isExporting={isExporting}
      exportProgress={exportProgress}
      exportStatus={exportStatus}
      timelineElements={timelineElements}
      activeElements={activeElements}
      videoElements={videoElements}
      loadedVideos={loadedVideos}
      videoStates={videoStates}
      ffmpegCommand={ffmpegCommand}
      handlePlay={handlePlay}
      handlePause={handlePause}
      handleReset={handleReset}
      handleSeek={handleSeek}
      copyFFmpegCommand={copyFFmpegCommand}
      exportVideo={exportVideo}
      layerRef={layerRef}
    />
  </div>
);
|
|
};
|
|
|
|
export default VideoEditor;
|