Update
resources/js/modules/editor/partials/canvas/video-editor.jsx (new file, 655 lines)
@@ -0,0 +1,655 @@
import { FFmpeg } from '@ffmpeg/ffmpeg';
import { fetchFile, toBlobURL } from '@ffmpeg/util';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import VideoPreview from './video-preview';

const VideoEditor = ({ width, height }) => {
    const [dimensions] = useState({
        width: width,
        height: height,
    });

    // Note: element '1' carries a source_webm/source_mov pair (plus a poster),
    // while '2' and '3' carry a single `source`; the consuming code below
    // handles both shapes.
    const [timelineElements, setTimelineElements] = useState([
        {
            id: '1',
            type: 'video',
            source_webm: 'https://cdn.memeaigen.com/g1/webm/they-not-like-us-oiia-cat-version.webm',
            source_mov: 'https://cdn.memeaigen.com/g1/mov/they-not-like-us-oiia-cat-version.mov',
            poster: 'https://cdn.memeaigen.com/g1/webm/they-not-like-us-oiia-cat-version.webp',
            name: 'They not like us cat',
            startTime: 0,
            layer: 0,
            inPoint: 0,
            duration: 5,
            x: 50,
            y: 50,
            width: 300, // Will be updated when video loads
            height: 200, // Will be updated when video loads
        },
        {
            id: '2',
            type: 'video',
            source: 'https://cdn.memeaigen.com/g1/webm/sad-cat.webm',
            name: 'Sad cat meme',
            startTime: 6,
            layer: 0,
            inPoint: 2,
            duration: 4,
            x: 100,
            y: 100,
            width: 250, // Will be updated when video loads
            height: 150, // Will be updated when video loads
        },
        {
            id: '3',
            type: 'video',
            source: 'https://cdn.memeaigen.com/g1/webm/este-cat-dance.webm',
            name: 'Este cat dance',
            startTime: 2,
            layer: 1,
            inPoint: 1,
            duration: 6,
            x: 200,
            y: 200,
            width: 280, // Will be updated when video loads
            height: 180, // Will be updated when video loads
        },
        {
            id: '4',
            type: 'text',
            text: 'Welcome to the Timeline!',
            startTime: 1,
            layer: 2,
            duration: 3,
            x: 50,
            y: 600,
            fontSize: 24,
            fill: 'white',
            stroke: 'black',
            strokeWidth: 1,
        },
        {
            id: '5',
            type: 'text',
            text: 'Multiple videos playing!',
            startTime: 3,
            layer: 3,
            duration: 4,
            x: 50,
            y: 650,
            fontSize: 20,
            fill: 'yellow',
            stroke: 'red',
            strokeWidth: 2,
        },
    ]);
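
    // Timing model (illustrative): `startTime` is where a clip sits on the
    // timeline, `inPoint` is the offset into its source file, and `duration`
    // is how long it plays. Element '2' above, for example, plays source
    // seconds 2-6 during timeline seconds 6-10.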

    const lastUpdateRef = useRef(0);

    // FFmpeg WASM state (note: currently unused - exportVideo creates its own instance per run)
    const ffmpegRef = useRef(new FFmpeg());

    const [isExporting, setIsExporting] = useState(false);
    const [exportProgress, setExportProgress] = useState(0);
    const [exportStatus, setExportStatus] = useState('');

    const [currentTime, setCurrentTime] = useState(0);
    const [isPlaying, setIsPlaying] = useState(false);
    const [videoElements, setVideoElements] = useState({});
    const [loadedVideos, setLoadedVideos] = useState(new Set());
    const [status, setStatus] = useState('Loading videos...');

    // Track which videos should be playing - this is the key optimization
    const [videoStates, setVideoStates] = useState({});

    const animationRef = useRef(null);
    const layerRef = useRef(null);
    const startTimeRef = useRef(0);
    const pausedTimeRef = useRef(0);

    // Calculate total timeline duration
    const totalDuration = Math.max(...timelineElements.map((el) => el.startTime + el.duration));
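    // With the seed data above: max(0+5, 6+4, 2+6, 1+3, 3+4) = 10 seconds.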

    // Generate the FFmpeg command (string form for copying, argv form for ffmpeg.wasm)
    const generateFFmpegCommand = useCallback(
        (is_string = true, useLocalFiles = false) => {
            console.log('🎬 STARTING FFmpeg generation');

            const videos = timelineElements.filter((el) => el.type === 'video');
            const texts = timelineElements.filter((el) => el.type === 'text');

            console.log('Videos found:', videos.length);

            if (videos.length === 0) {
                if (is_string) {
                    return 'ffmpeg -f lavfi -i color=black:size=450x800:duration=1 -c:v libx264 -t 1 output.mp4';
                } else {
                    return ['-f', 'lavfi', '-i', 'color=black:size=450x800:duration=1', '-c:v', 'libx264', '-t', '1', 'output.mp4'];
                }
            }

            // Build inputs (fall back to source_webm for elements without a plain `source`)
            let inputArgs = [];
            videos.forEach((v, i) => {
                inputArgs.push('-i');
                inputArgs.push(useLocalFiles ? `input${i}.webm` : (v.source ?? v.source_webm));
            });

            // Build filter parts array
            let filters = [];

            // Base canvas
            filters.push(`color=black:size=${dimensions.width}x${dimensions.height}:duration=${totalDuration}[base]`);

            // Process video streams: trim each clip, scale it, then overlay it onto the
            // running composite, enabled only during its timeline window
            let videoLayer = 'base';
            videos.forEach((v, i) => {
                filters.push(`[${i}:v]trim=start=${v.inPoint}:duration=${v.duration},setpts=PTS-STARTPTS[v${i}_trim]`);
                filters.push(`[v${i}_trim]scale=${Math.round(v.width)}:${Math.round(v.height)}[v${i}_scale]`);
                filters.push(
                    `[${videoLayer}][v${i}_scale]overlay=${Math.round(v.x)}:${Math.round(v.y)}:enable='between(t,${v.startTime},${
                        v.startTime + v.duration
                    })'[v${i}_out]`,
                );
                videoLayer = `v${i}_out`;
            });
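
            // For a single clip with element '2'-style values, this yields a chain
            // like (illustrative):
            //   [0:v]trim=start=2:duration=4,setpts=PTS-STARTPTS[v0_trim];
            //   [v0_trim]scale=250:150[v0_scale];
            //   [base][v0_scale]overlay=100:100:enable='between(t,6,10)'[v0_out]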

            // AUDIO PROCESSING
            console.log('🎵 PROCESSING AUDIO FOR', videos.length, 'VIDEOS');

            let audioOutputs = [];
            videos.forEach((v, i) => {
                const delay = Math.round(v.startTime * 1000);
                console.log(`🎵 Audio ${i}: delay=${delay}ms, inPoint=${v.inPoint}, duration=${v.duration}`);
                filters.push(`[${i}:a]atrim=start=${v.inPoint}:duration=${v.duration},asetpts=PTS-STARTPTS,adelay=${delay}|${delay}[a${i}]`);
                audioOutputs.push(`[a${i}]`);
            });

            // Audio mixing
            let audioArgs = [];
            if (audioOutputs.length === 1) {
                filters.push(`[a0]apad=pad_dur=${totalDuration}[audio_final]`);
                audioArgs = ['-map', '[audio_final]', '-c:a', 'aac'];
            } else if (audioOutputs.length > 1) {
                filters.push(`${audioOutputs.join('')}amix=inputs=${audioOutputs.length}:duration=longest[audio_final]`);
                audioArgs = ['-map', '[audio_final]', '-c:a', 'aac'];
            }
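
            // Note: adelay takes one value per channel, hence `${delay}|${delay}`
            // for stereo; amix with duration=longest keeps the mix as long as the
            // latest-ending clip.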

            console.log('🎵 Audio args:', audioArgs);

            // Add text overlays on top of the final video layer
            texts.forEach((t, i) => {
                // Escape quotes and colons, which are delimiters in drawtext
                const escapedText = t.text.replace(/'/g, is_string ? "\\'" : "'").replace(/:/g, '\\:');

                filters.push(
                    `[${videoLayer}]drawtext=fontfile=/arial.ttf:text='${escapedText}':x=${Math.round(
                        t.x,
                    )}:y=${Math.round(t.y)}:fontsize=${t.fontSize}:fontcolor=${t.fill}:borderw=${t.strokeWidth}:bordercolor=${
                        t.stroke
                    }:enable='between(t,${t.startTime},${t.startTime + t.duration})'[t${i}]`,
                );
                videoLayer = `t${i}`;
            });

            // Join all filter parts
            const filterComplex = filters.join('; ');
            console.log('🎵 Filter includes atrim:', filterComplex.includes('atrim'));

            // Build final arguments
            const finalArgs = [
                ...inputArgs,
                '-filter_complex',
                filterComplex,
                '-map',
                `[${videoLayer}]`,
                ...audioArgs,
                '-c:v',
                'libx264',
                '-pix_fmt',
                'yuv420p',
                '-r',
                '30',
                '-t',
                totalDuration.toString(),
                'output.mp4',
            ];

            if (is_string) {
                // Build the final command string
                const inputs = videos.map((v, i) => `-i "${useLocalFiles ? `input${i}.webm` : (v.source ?? v.source_webm)}"`).join(' ');
                const audioMap = audioArgs.length > 0 ? ` ${audioArgs.join(' ')}` : '';
                const command = `ffmpeg ${inputs} -filter_complex "${filterComplex}" -map "[${videoLayer}]"${audioMap} -c:v libx264 -pix_fmt yuv420p -r 30 -t ${totalDuration} output.mp4`;

                console.log('🎵 FINAL COMMAND HAS AUDIO:', command.includes('atrim') && command.includes('audio_final'));

                return command;
            } else {
                // Check the joined filter graph: finalArgs is an array, so searching
                // it directly for substrings would always report false
                console.log('🎵 FINAL ARGS HAVE AUDIO:', filterComplex.includes('atrim') && filterComplex.includes('audio_final'));

                return finalArgs;
            }
        },
        [timelineElements, dimensions, totalDuration],
    );
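
    // The string form comes out roughly like this (two clips, illustrative):
    //   ffmpeg -i "a.webm" -i "b.webm" \
    //     -filter_complex "color=black:size=450x800:duration=10[base]; ...; [a0][a1]amix=inputs=2:duration=longest[audio_final]" \
    //     -map "[t1]" -map [audio_final] -c:a aac -c:v libx264 -pix_fmt yuv420p -r 30 -t 10 output.mp4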

    // Memoize the FFmpeg command
    const ffmpegCommand = useMemo(() => {
        return generateFFmpegCommand(true, false);
    }, [generateFFmpegCommand]);

    // Memoize the copy function
    const copyFFmpegCommand = useCallback(() => {
        console.log('🎬 FFMPEG COMMAND GENERATED:');
        console.log('Command:', ffmpegCommand);
        navigator.clipboard.writeText(ffmpegCommand);
    }, [ffmpegCommand]);
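
    // Note: navigator.clipboard.writeText requires a secure (HTTPS) context and
    // returns a promise, which is not awaited here, so failures pass silently.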

    // Create off-DOM <video> elements that Konva draws as image sources
    useEffect(() => {
        const videoEls = {};
        const videoElementsData = timelineElements.filter((el) => el.type === 'video');

        videoElementsData.forEach((element) => {
            const video = document.createElement('video');
            if (element.poster) {
                video.poster = element.poster;
            }
            video.crossOrigin = 'anonymous';
            video.muted = true; // Start muted, unmute on play
            video.preload = 'auto'; // Preload entire video content
            video.playsInline = true; // Better mobile performance
            video.controls = false; // Remove native controls

            if (element.source_webm || element.source_mov) {
                // Offer both containers; the browser picks the first source it can play
                if (element.source_mov) {
                    const sourceMov = document.createElement('source');
                    sourceMov.src = element.source_mov;
                    sourceMov.type = 'video/quicktime; codecs=hvc1.1.6.H120.b0';
                    video.appendChild(sourceMov);
                }
                if (element.source_webm) {
                    const sourceWebM = document.createElement('source');
                    sourceWebM.src = element.source_webm;
                    sourceWebM.type = 'video/webm; codecs=vp09.00.41.08';
                    video.appendChild(sourceWebM);
                }
            } else if (element.source) {
                video.src = element.source;
            }

            video.addEventListener('loadedmetadata', () => {
                // Calculate scaling to fit within the canvas while maintaining aspect ratio
                const maxWidth = dimensions.width;
                const maxHeight = dimensions.height;
                const videoWidth = video.videoWidth;
                const videoHeight = video.videoHeight;

                let scaledWidth = videoWidth;
                let scaledHeight = videoHeight;

                // Only scale down if the video is larger than the canvas
                if (videoWidth > maxWidth || videoHeight > maxHeight) {
                    const scaleX = maxWidth / videoWidth;
                    const scaleY = maxHeight / videoHeight;
                    const scale = Math.min(scaleX, scaleY); // Use the smaller scale to fit both dimensions

                    scaledWidth = videoWidth * scale;
                    scaledHeight = videoHeight * scale;
                }

                // Center the video in the canvas
                const centeredX = (maxWidth - scaledWidth) / 2;
                const centeredY = (maxHeight - scaledHeight) / 2;

                // Update the timeline element with the scaled, centered geometry
                setTimelineElements((prev) =>
                    prev.map((el) => {
                        if (el.id === element.id && el.type === 'video') {
                            return {
                                ...el,
                                x: centeredX,
                                y: centeredY,
                                width: scaledWidth,
                                height: scaledHeight,
                            };
                        }
                        return el;
                    }),
                );

                setLoadedVideos((prev) => {
                    const newSet = new Set(prev);
                    newSet.add(element.id);
                    return newSet;
                });
            });
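
            // Contain-fit example: a 1920x1080 source on a 450x800 canvas scales
            // by min(450/1920, 800/1080) ≈ 0.234 → 450x253, centered at x=0, y≈273.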

            video.addEventListener('error', (e) => {
                console.error(`Error loading video ${element.id}:`, e);
            });

            videoEls[element.id] = video;
        });

        setVideoElements(videoEls);

        return () => {
            // Release media resources on unmount
            Object.values(videoEls).forEach((video) => {
                video.src = '';
                video.load();
            });
        };
    }, []); // Only run once on mount

    // Update status as videos load
    useEffect(() => {
        const videoCount = timelineElements.filter((el) => el.type === 'video').length;
        if (loadedVideos.size === videoCount && videoCount > 0) {
            setStatus('Ready to play');
        } else if (videoCount > 0) {
            setStatus(`Loading videos... (${loadedVideos.size}/${videoCount})`);
        } else {
            setStatus('Ready to play');
        }
    }, [loadedVideos, timelineElements]);

    const handlePause = useCallback(() => {
        if (isPlaying) {
            setIsPlaying(false);
            pausedTimeRef.current = currentTime;

            // Pause and mute all videos when pausing the timeline
            Object.values(videoElements).forEach((video) => {
                if (!video.paused) {
                    video.pause();
                }
                video.muted = true;
            });

            // Reset video state tracking
            setVideoStates({});

            if (animationRef.current) {
                animationRef.current.stop();
            }
        }
    }, [isPlaying, currentTime, videoElements]);

    const exportVideo = async () => {
        setIsExporting(true);
        setExportProgress(0);
        setExportStatus('Starting export...');

        try {
            setExportStatus('Loading FFmpeg...');

            const ffmpeg = new FFmpeg();

            ffmpeg.on('progress', ({ progress }) => {
                setExportProgress(Math.round(progress * 100));
            });

            ffmpeg.on('log', ({ message }) => {
                console.log(message);
            });

            const baseURL = 'https://unpkg.com/@ffmpeg/core@0.12.6/dist/esm';
            const coreURL = `${baseURL}/ffmpeg-core.js`;
            const wasmURL = `${baseURL}/ffmpeg-core.wasm`;

            // toBlobURL fetches the assets and re-serves them from same-origin
            // blob: URLs, sidestepping cross-origin worker restrictions
            console.log('Converting JS coreURL...');
            const coreBlobURL = await toBlobURL(coreURL, 'text/javascript');
            console.log('JS coreURL ready:', coreBlobURL);

            console.log('Converting WASM URL...');
            const wasmBlobURL = await toBlobURL(wasmURL, 'application/wasm');
            console.log('WASM URL ready:', wasmBlobURL);

            console.log('Calling ffmpeg.load...');
            await ffmpeg.load({
                coreURL: coreBlobURL,
                wasmURL: wasmBlobURL,
            });
            console.log('FFmpeg loaded!');
            setExportProgress(20);

            // Write arial.ttf into FFmpeg's virtual FS so drawtext can use it
            setExportStatus('Loading font...');
            await ffmpeg.writeFile('arial.ttf', await fetchFile('https://raw.githubusercontent.com/ffmpegwasm/testdata/master/arial.ttf'));
            console.log('Font loaded!');
            setExportProgress(30);

            // Download the source videos into the virtual FS as input0.webm, input1.webm, ...
            setExportStatus('Downloading videos...');
            const videos = timelineElements.filter((el) => el.type === 'video');

            for (let i = 0; i < videos.length; i++) {
                await ffmpeg.writeFile(`input${i}.webm`, await fetchFile(videos[i].source ?? videos[i].source_webm));
                setExportProgress(30 + Math.round(((i + 1) / videos.length) * 30));
            }

            // generateFFmpegCommand already embeds fontfile=/arial.ttf in every
            // drawtext filter, matching the font written above
            setExportStatus('Processing video...');
            const args = generateFFmpegCommand(false, true);

            setExportProgress(70);
            await ffmpeg.exec(args);
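
            // Note: in @ffmpeg/ffmpeg 0.12.x, exec() appears to resolve with the
            // process exit code rather than rejecting on an encode failure (worth
            // verifying against your installed version), so the 'log' events
            // above are the main place failures surface.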

            // Read the result out of the virtual FS and trigger a download
            setExportStatus('Downloading...');
            setExportProgress(90);

            const fileData = await ffmpeg.readFile('output.mp4');
            const data = new Uint8Array(fileData);

            const blob = new Blob([data.buffer], { type: 'video/mp4' });
            const url = URL.createObjectURL(blob);

            const link = document.createElement('a');
            link.href = url;
            link.download = 'exported_video.mp4';
            link.click();
            URL.revokeObjectURL(url);

            setExportProgress(100);
            setExportStatus('Complete!');

            ffmpeg.terminate();
        } catch (error) {
            console.error('Export error:', error);
            setExportStatus(`Failed: ${error.message}`);
        } finally {
            // Clear the export UI after a short delay
            setTimeout(() => {
                setIsExporting(false);
                setExportStatus('');
                setExportProgress(0);
            }, 3000);
        }
    };

    // Get the elements active at a given timeline position
    const getActiveElements = useCallback(
        (time) => {
            return timelineElements.filter((element) => {
                const elementEndTime = element.startTime + element.duration;
                return time >= element.startTime && time < elementEndTime;
            });
        },
        [timelineElements],
    );

    // Calculate which videos should be playing at a given time
    const getDesiredVideoStates = useCallback(
        (time) => {
            const states = {};
            timelineElements.forEach((element) => {
                if (element.type === 'video') {
                    const elementEndTime = element.startTime + element.duration;
                    states[element.id] = time >= element.startTime && time < elementEndTime;
                }
            });
            return states;
        },
        [timelineElements], // Takes time as a parameter, so no dependency on currentTime
    );

    // Sync each video's currentTime to the timeline position - optimized to reduce seeking
    const updateVideoTimes = useCallback(
        (time) => {
            timelineElements.forEach((element) => {
                if (element.type === 'video' && videoElements[element.id]) {
                    const video = videoElements[element.id];
                    const elementEndTime = element.startTime + element.duration;

                    if (time >= element.startTime && time < elementEndTime) {
                        const relativeTime = time - element.startTime;
                        const videoTime = element.inPoint + relativeTime;

                        // Only seek when drift exceeds 0.5s; seeking is expensive
                        if (Math.abs(video.currentTime - videoTime) > 0.5) {
                            video.currentTime = videoTime;
                        }
                    }
                }
            });
        },
        [timelineElements, videoElements],
    );
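
    // Example: at timeline t=7.5s, element '2' (startTime 6, inPoint 2) should
    // sit at source time 2 + (7.5 - 6) = 3.5s.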

    // Manage video play/pause state only when it actually changes
    useEffect(() => {
        if (!isPlaying) return;

        const desiredStates = getDesiredVideoStates(currentTime);

        // Play or pause without redundant calls
        Object.entries(desiredStates).forEach(([videoId, shouldPlay]) => {
            const video = videoElements[videoId];
            const isCurrentlyPlaying = video && !video.paused;

            if (video) {
                if (shouldPlay && !isCurrentlyPlaying) {
                    video.muted = false;
                    video.play().catch((e) => console.warn('Video play failed:', e));
                } else if (!shouldPlay && isCurrentlyPlaying) {
                    video.pause();
                    video.muted = true;
                }
            }
        });

        setVideoStates(desiredStates);
    }, [currentTime, isPlaying, videoElements, getDesiredVideoStates]);

    const animate = useCallback(() => {
        if (!isPlaying) return;

        const now = Date.now() / 1000;
        const newTime = pausedTimeRef.current + (now - startTimeRef.current);

        if (newTime >= totalDuration) {
            handlePause();
            handleSeek(0); // Reset the timeline to the start
            return;
        }

        // Throttle state updates to every 50ms (~20 fps) to limit re-renders
        if (newTime - lastUpdateRef.current >= 0.05) {
            lastUpdateRef.current = newTime;
            setCurrentTime(newTime);
            updateVideoTimes(newTime);

            if (layerRef.current) {
                layerRef.current.batchDraw();
            }
        }
    }, [isPlaying, totalDuration, updateVideoTimes, handlePause]);
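
    // Timeline time is derived from the wall clock (pausedTime + elapsed), so it
    // stays accurate even when frames are dropped, rather than accumulating
    // per-frame deltas that would drift.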

    // Drive the clock with requestAnimationFrame for better performance
    useEffect(() => {
        if (isPlaying) {
            let animationId;

            const animateFrame = () => {
                animate();
                animationId = requestAnimationFrame(animateFrame);
            };

            animationId = requestAnimationFrame(animateFrame);
            animationRef.current = { stop: () => cancelAnimationFrame(animationId) };

            return () => {
                if (animationRef.current) {
                    animationRef.current.stop();
                }
            };
        }
    }, [isPlaying, animate]);

    const handlePlay = () => {
        if (!isPlaying) {
            setIsPlaying(true);
            startTimeRef.current = Date.now() / 1000;
            lastUpdateRef.current = 0; // Reset the update throttle
            setStatus('');
        }
    };

    const handleSeek = (time) => {
        const clampedTime = Math.max(0, Math.min(time, totalDuration));
        setCurrentTime(clampedTime);
        pausedTimeRef.current = clampedTime;
        updateVideoTimes(clampedTime);

        // Reset video states when seeking to force re-evaluation
        setVideoStates({});

        if (layerRef.current) {
            layerRef.current.draw();
        }
    };

    const handleReset = () => {
        handlePause();
        handleSeek(0);
        lastUpdateRef.current = 0; // Reset the update throttle

        // Ensure all videos are muted
        Object.values(videoElements).forEach((video) => {
            video.muted = true;
        });
    };

    const activeElements = getActiveElements(currentTime);

    return (
        <div style={{ width: dimensions.width, height: dimensions.height }} className="rounded-3xl">
            <VideoPreview
                // Dimensions
                dimensions={dimensions}
                // Timeline state
                currentTime={currentTime}
                totalDuration={totalDuration}
                isPlaying={isPlaying}
                status={status}
                // Export state
                isExporting={isExporting}
                exportProgress={exportProgress}
                exportStatus={exportStatus}
                // Data
                timelineElements={timelineElements}
                activeElements={activeElements}
                videoElements={videoElements}
                loadedVideos={loadedVideos}
                videoStates={videoStates}
                ffmpegCommand={ffmpegCommand}
                // Event handlers
                handlePlay={handlePlay}
                handlePause={handlePause}
                handleReset={handleReset}
                handleSeek={handleSeek}
                copyFFmpegCommand={copyFFmpegCommand}
                exportVideo={exportVideo}
                // Refs
                layerRef={layerRef}
            />
        </div>
    );
};

export default VideoEditor;

resources/js/modules/editor/partials/canvas/video-preview.jsx (new file, 93 lines)
@@ -0,0 +1,93 @@
// Use minimal react-konva core to avoid Node.js dependencies
import 'konva/lib/Animation';
import 'konva/lib/shapes/Image';
import 'konva/lib/shapes/Text';
import { Image, Layer, Stage, Text } from 'react-konva/lib/ReactKonvaCore';

// Note: the transport and export props below are accepted but not yet rendered
// in this component; only the Konva stage is drawn.
const VideoPreview = ({
    // Dimensions
    dimensions,

    // Timeline state
    currentTime,
    totalDuration,
    isPlaying,
    status,

    // Export state
    isExporting,
    exportProgress,
    exportStatus,

    // Data
    timelineElements,
    activeElements,
    videoElements,
    loadedVideos,
    videoStates,
    ffmpegCommand,

    // Event handlers
    handlePlay,
    handlePause,
    handleReset,
    handleSeek,
    copyFFmpegCommand,
    exportVideo,

    // Refs
    layerRef,
}) => {
    return (
        <div>
            <Stage width={dimensions.width} height={dimensions.height}>
                <Layer ref={layerRef}>
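                    {/* Konva's Image node accepts a <video> element and paints its
                        current frame; the editor's rAF loop calls layer.batchDraw()
                        to refresh frames during playback */}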
                    {activeElements.map((element) => {
                        if (element.type === 'video' && videoElements[element.id]) {
                            return (
                                <Image
                                    key={element.id}
                                    image={videoElements[element.id]}
                                    x={element.x}
                                    y={element.y}
                                    width={element.width}
                                    height={element.height}
                                    draggable
                                />
                            );
                        } else if (element.type === 'text') {
                            return (
                                <Text
                                    key={element.id}
                                    text={element.text}
                                    x={element.x}
                                    y={element.y}
                                    fontSize={element.fontSize}
                                    fill={element.fill}
                                    stroke={element.stroke}
                                    strokeWidth={element.strokeWidth}
                                    draggable
                                />
                            );
                        } else if (element.type === 'image' && element.imageElement) {
                            return (
                                <Image
                                    key={element.id}
                                    image={element.imageElement}
                                    x={element.x}
                                    y={element.y}
                                    width={element.width}
                                    height={element.height}
                                    draggable
                                />
                            );
                        }
                        return null;
                    })}
                </Layer>
            </Stage>
        </div>
    );
};

export default VideoPreview;
@@ -1,5 +1,6 @@
import React, { useEffect, useLayoutEffect, useState } from 'react';
import { LAYOUT_CONSTANTS, calculateResponsiveScale } from '../utils/layout-constants';
import VideoEditor from './canvas/video-editor';

const useResponsiveCanvas = (maxWidth: number = 350) => {
    const [scale, setScale] = useState(() => calculateResponsiveScale(maxWidth));

@@ -89,7 +90,7 @@ const EditorCanvas: React.FC<EditorCanvasProps> = ({ maxWidth = 350 }) => {
                    console.log(`Canvas coordinates: x=${x}, y=${y}`);
                }}
            >
                {/* Your canvas content goes here */}
                <VideoEditor width={LAYOUT_CONSTANTS.CANVAS_WIDTH} height={LAYOUT_CONSTANTS.CANVAS_HEIGHT} />
            </div>
        </div>
    </div>