Files
memefast/resources/js/modules/editor/partials/editor-canvas.tsx
2025-06-14 11:25:13 +08:00

563 lines
20 KiB
TypeScript

import useMediaStore from '@/stores/MediaStore';
import { FFmpeg } from '@ffmpeg/ffmpeg';
import { fetchFile, toBlobURL } from '@ffmpeg/util';
import 'konva/lib/Animation';
import 'konva/lib/shapes/Image';
import 'konva/lib/shapes/Text';
import React, { useCallback, useEffect, useLayoutEffect, useRef, useState } from 'react';
import { Image, Layer, Stage, Text } from 'react-konva/lib/ReactKonvaCore';
import { LAYOUT_CONSTANTS, calculateResponsiveScale } from '../utils/layout-constants';
/**
 * Tracks a responsive scale factor for the editor canvas.
 *
 * Recomputes via `calculateResponsiveScale(maxWidth)` on window resize,
 * orientation change, body size changes (ResizeObserver) and inline-style
 * mutations on <html> (MutationObserver, debounced by 50ms).
 *
 * @param maxWidth - maximum display width budget in CSS pixels.
 * @returns the current scale factor to apply to the fixed-size canvas.
 */
const useResponsiveCanvas = (maxWidth: number = 350) => {
  const [scale, setScale] = useState(() => calculateResponsiveScale(maxWidth));
  // Recompute synchronously before paint when the width budget changes,
  // avoiding a one-frame flash at the stale scale.
  useLayoutEffect(() => {
    setScale(calculateResponsiveScale(maxWidth));
  }, [maxWidth]);
  useEffect(() => {
    const handleResize = () => {
      setScale(calculateResponsiveScale(maxWidth));
    };
    handleResize();
    window.addEventListener('resize', handleResize);
    window.addEventListener('orientationchange', handleResize);
    let resizeObserver: ResizeObserver | undefined;
    if (window.ResizeObserver) {
      resizeObserver = new ResizeObserver(handleResize);
      resizeObserver.observe(document.body);
    }
    // Inline-style mutations on <html> (e.g. scroll locking by modals) can
    // change the available viewport without firing a resize event.
    let mutationObserver: MutationObserver | undefined;
    let mutationTimer: ReturnType<typeof setTimeout> | undefined;
    if (window.MutationObserver) {
      mutationObserver = new MutationObserver(() => {
        // Debounce so rapid mutation bursts coalesce into one measurement,
        // and give layout 50ms to settle before measuring.
        if (mutationTimer !== undefined) clearTimeout(mutationTimer);
        mutationTimer = setTimeout(handleResize, 50);
      });
      mutationObserver.observe(document.documentElement, {
        attributes: true,
        attributeFilter: ['style'],
      });
    }
    return () => {
      window.removeEventListener('resize', handleResize);
      window.removeEventListener('orientationchange', handleResize);
      if (resizeObserver) resizeObserver.disconnect();
      if (mutationObserver) mutationObserver.disconnect();
      // Fix: cancel any pending debounced callback so we never call
      // setScale after the component using this hook has unmounted.
      if (mutationTimer !== undefined) clearTimeout(mutationTimer);
    };
  }, [maxWidth]);
  return scale;
};
/** Props for {@link EditorCanvas}. */
interface EditorCanvasProps {
  /** Maximum display width of the scaled canvas in CSS pixels (default 350). */
  maxWidth?: number;
}
/**
 * EditorCanvas renders the meme composition (background image + meme video
 * overlay) on a fixed-size Konva stage, scaled responsively to `maxWidth`.
 *
 * It keeps an internal, UI-hidden timeline: elements derived from the
 * MediaStore selections, a play/pause clock driven by requestAnimationFrame,
 * and an ffmpeg.wasm-based exporter. Playback controls are exposed on
 * `window.timelineControls` for the surrounding toolbar buttons.
 */
const EditorCanvas: React.FC<EditorCanvasProps> = ({ maxWidth = 350 }) => {
  const scale = useResponsiveCanvas(maxWidth);
  const displayWidth = LAYOUT_CONSTANTS.CANVAS_WIDTH * scale;
  const displayHeight = LAYOUT_CONSTANTS.CANVAS_HEIGHT * scale;
  const { selectedBackground, selectedMeme } = useMediaStore();
  // Timeline state (hidden from UI)
  const [timelineElements, setTimelineElements] = useState([]);
  const [currentTime, setCurrentTime] = useState(0);
  const [isPlaying, setIsPlaying] = useState(false);
  // Maps element id -> HTMLVideoElement / HTMLImageElement used as Konva image sources.
  const [videoElements, setVideoElements] = useState({});
  const [imageElements, setImageElements] = useState({});
  // Ids of media whose metadata/data has loaded (triggers redraws).
  const [loadedVideos, setLoadedVideos] = useState(new Set());
  const [videoStates, setVideoStates] = useState({});
  const [isExporting, setIsExporting] = useState(false);
  const animationRef = useRef(null);
  const layerRef = useRef(null);
  // Wall-clock bookkeeping for the playback clock (seconds).
  const startTimeRef = useRef(0);
  const pausedTimeRef = useRef(0);
  const lastUpdateRef = useRef(0);

  // Generate timeline elements from MediaStore selections
  useEffect(() => {
    const elements = [];
    // Background (full duration)
    if (selectedBackground) {
      elements.push({
        id: 'background',
        type: 'image',
        source: selectedBackground.media_url,
        poster: selectedBackground.media_url,
        name: 'Background',
        startTime: 0,
        layer: 1,
        duration: 10, // Default 10 seconds
        x: 0,
        y: 0,
        width: LAYOUT_CONSTANTS.CANVAS_WIDTH,
        height: LAYOUT_CONSTANTS.CANVAS_HEIGHT,
      });
    }
    // Meme overlay (shorter duration, centered at 80% x 60% of the canvas)
    if (selectedMeme) {
      const memeWidth = LAYOUT_CONSTANTS.CANVAS_WIDTH * 0.8;
      const memeHeight = LAYOUT_CONSTANTS.CANVAS_HEIGHT * 0.6;
      const memeX = (LAYOUT_CONSTANTS.CANVAS_WIDTH - memeWidth) / 2;
      const memeY = (LAYOUT_CONSTANTS.CANVAS_HEIGHT - memeHeight) / 2;
      elements.push({
        id: 'meme',
        type: 'video',
        // NOTE: meme videos carry dual sources (webm + mov), not a single `source`.
        source_webm: selectedMeme.webm_url,
        source_mov: selectedMeme.mov_url,
        poster: selectedMeme.webp_url,
        name: 'Meme',
        startTime: 0,
        layer: 0,
        inPoint: 0,
        duration: 6,
        x: memeX,
        y: memeY,
        width: memeWidth,
        height: memeHeight,
      });
    }
    setTimelineElements(elements);
  }, [selectedBackground, selectedMeme]);

  // Total timeline length in seconds; floor of 1s when nothing is selected.
  const totalDuration = Math.max(...timelineElements.map((el) => el.startTime + el.duration), 1);

  // Seek each active video element to its position on the timeline.
  const updateVideoTimes = useCallback(
    (time) => {
      timelineElements.forEach((element) => {
        if (element.type === 'video' && videoElements[element.id]) {
          const video = videoElements[element.id];
          const elementEndTime = element.startTime + element.duration;
          if (time >= element.startTime && time < elementEndTime) {
            const relativeTime = time - element.startTime;
            const videoTime = (element.inPoint || 0) + relativeTime;
            // Only seek when drift exceeds 100ms to avoid constant re-seeks.
            if (Math.abs(video.currentTime - videoTime) > 0.1) {
              video.currentTime = videoTime;
            }
          }
        }
      });
    },
    [timelineElements, videoElements],
  );

  // Create media elements when timeline changes
  useEffect(() => {
    const videoEls = {};
    const imageEls = {};
    timelineElements.forEach((element) => {
      if (element.type === 'video') {
        const video = document.createElement('video');
        video.poster = element.poster;
        video.crossOrigin = 'anonymous';
        video.muted = true;
        video.preload = 'auto';
        video.playsInline = true;
        video.controls = false;
        video.loop = true;
        if (element.source) {
          video.src = element.source;
        } else if (element.source_webm && element.source_mov) {
          // Dual-source video: let the browser pick webm or mov/mp4.
          const sourceWebm = document.createElement('source');
          sourceWebm.src = element.source_webm;
          sourceWebm.type = 'video/webm';
          const sourceMov = document.createElement('source');
          sourceMov.src = element.source_mov;
          sourceMov.type = 'video/mp4';
          video.appendChild(sourceWebm);
          video.appendChild(sourceMov);
        }
        // Capture element data to avoid variable shadowing
        const elementId = element.id;
        const elementData = { ...element };
        video.addEventListener('loadedmetadata', () => {
          // Set initial time position based on current timeline position
          if (currentTime >= elementData.startTime && currentTime < elementData.startTime + elementData.duration) {
            const relativeTime = currentTime - elementData.startTime;
            const videoTime = (elementData.inPoint || 0) + relativeTime;
            video.currentTime = videoTime;
          }
          setLoadedVideos((prev) => {
            const newSet = new Set(prev);
            newSet.add(elementId);
            return newSet;
          });
          // Force a canvas redraw after video loads
          if (layerRef.current) {
            layerRef.current.batchDraw();
          }
        });
        video.addEventListener('loadeddata', () => {
          // Also trigger redraw when video data is loaded
          if (layerRef.current) {
            layerRef.current.batchDraw();
          }
        });
        videoEls[element.id] = video;
      } else if (element.type === 'image') {
        const img = new window.Image();
        img.crossOrigin = 'anonymous';
        img.src = element.source;
        img.addEventListener('load', () => {
          setLoadedVideos((prev) => {
            const newSet = new Set(prev);
            newSet.add(element.id);
            return newSet;
          });
          // Force a canvas redraw after image loads
          if (layerRef.current) {
            layerRef.current.batchDraw();
          }
        });
        imageEls[element.id] = img;
      }
    });
    setVideoElements(videoEls);
    setImageElements(imageEls);
    return () => {
      // Release video network/decoder resources on timeline change/unmount.
      Object.values(videoEls).forEach((video) => {
        video.src = '';
        video.load();
      });
    };
  }, [timelineElements]);

  // Update video times whenever currentTime changes (for paused state)
  useEffect(() => {
    if (!isPlaying) {
      updateVideoTimes(currentTime);
    }
  }, [currentTime, updateVideoTimes, isPlaying]);

  // Elements whose [startTime, startTime + duration) interval contains `time`.
  const getActiveElements = useCallback(
    (time) => {
      return timelineElements.filter((element) => {
        const elementEndTime = element.startTime + element.duration;
        return time >= element.startTime && time < elementEndTime;
      });
    },
    [timelineElements],
  );

  // Manage video playback states: play/unmute videos inside their window,
  // pause/mute the rest. Only runs while the timeline is playing.
  useEffect(() => {
    if (!isPlaying) return;
    timelineElements.forEach((element) => {
      if (element.type === 'video' && videoElements[element.id]) {
        const video = videoElements[element.id];
        const elementEndTime = element.startTime + element.duration;
        const shouldPlay = currentTime >= element.startTime && currentTime < elementEndTime;
        if (shouldPlay && video.paused) {
          video.muted = false;
          // Autoplay may be rejected by the browser; ignore the failure.
          video.play().catch(() => {});
        } else if (!shouldPlay && !video.paused) {
          video.pause();
          video.muted = true;
        }
      }
    });
  }, [currentTime, isPlaying, timelineElements, videoElements]);

  // Animation loop: advance the clock, stop at the end, and throttle state
  // updates/redraws to ~20fps (every 0.05s) to limit re-renders.
  const animate = useCallback(() => {
    if (!isPlaying) return;
    const now = Date.now() / 1000;
    const newTime = pausedTimeRef.current + (now - startTimeRef.current);
    if (newTime >= totalDuration) {
      setIsPlaying(false);
      setCurrentTime(0);
      pausedTimeRef.current = 0;
      Object.values(videoElements).forEach((video) => {
        video.pause();
        video.muted = true;
      });
      return;
    }
    if (newTime - lastUpdateRef.current >= 0.05) {
      lastUpdateRef.current = newTime;
      setCurrentTime(newTime);
      if (layerRef.current) {
        layerRef.current.batchDraw();
      }
    }
  }, [isPlaying, totalDuration, videoElements]);

  useEffect(() => {
    if (isPlaying) {
      let animationId;
      const animateFrame = () => {
        animate();
        animationId = requestAnimationFrame(animateFrame);
      };
      animationId = requestAnimationFrame(animateFrame);
      animationRef.current = { stop: () => cancelAnimationFrame(animationId) };
      return () => {
        if (animationRef.current) {
          animationRef.current.stop();
        }
      };
    }
  }, [isPlaying, animate]);

  /**
   * Export the composition as an MP4 using ffmpeg.wasm: writes each media
   * file into the ffmpeg FS, builds a filter_complex that overlays scaled
   * images then trimmed videos onto a black base, encodes with libx264, and
   * triggers a browser download of the result.
   */
  const exportVideo = async () => {
    if (isExporting) return;
    setIsExporting(true);
    try {
      const ffmpeg = new FFmpeg();
      const baseURL = 'https://unpkg.com/@ffmpeg/core@0.12.6/dist/esm';
      const coreURL = `${baseURL}/ffmpeg-core.js`;
      const wasmURL = `${baseURL}/ffmpeg-core.wasm`;
      // Blob URLs work around cross-origin worker restrictions.
      const coreBlobURL = await toBlobURL(coreURL, 'text/javascript');
      const wasmBlobURL = await toBlobURL(wasmURL, 'application/wasm');
      await ffmpeg.load({
        coreURL: coreBlobURL,
        wasmURL: wasmBlobURL,
      });
      // Download media files into the ffmpeg virtual filesystem.
      const videos = timelineElements.filter((el) => el.type === 'video');
      const images = timelineElements.filter((el) => el.type === 'image');
      for (let i = 0; i < videos.length; i++) {
        // Fix: meme videos carry `source_webm`/`source_mov` rather than a
        // single `source` — previously this fetched `undefined`.
        const videoSource = videos[i].source ?? videos[i].source_webm;
        await ffmpeg.writeFile(`video${i}.webm`, await fetchFile(videoSource));
      }
      for (let i = 0; i < images.length; i++) {
        await ffmpeg.writeFile(`image${i}.jpg`, await fetchFile(images[i].source));
      }
      // Generate FFmpeg command
      let inputArgs = [];
      let filters = [];
      // Add inputs — videos first, then images, so image input index i
      // becomes `videos.length + i` in the filter graph.
      videos.forEach((v, i) => {
        inputArgs.push('-i', `video${i}.webm`);
      });
      images.forEach((img, i) => {
        // Fix: must reference the filename actually written above
        // (`image${i}.jpg`), not an index-offset name.
        inputArgs.push('-i', `image${i}.jpg`);
      });
      // Base canvas: black background sized to the editor canvas.
      filters.push(`color=black:size=${LAYOUT_CONSTANTS.CANVAS_WIDTH}x${LAYOUT_CONSTANTS.CANVAS_HEIGHT}:duration=${totalDuration}[base]`);
      let currentLayer = 'base';
      // Process images first (backgrounds)
      images.forEach((img, i) => {
        const inputIndex = videos.length + i;
        filters.push(`[${inputIndex}:v]scale=${Math.round(img.width)}:${Math.round(img.height)}[img${i}_scale]`);
        filters.push(
          `[${currentLayer}][img${i}_scale]overlay=${Math.round(img.x)}:${Math.round(img.y)}:enable='between(t,${img.startTime},${img.startTime + img.duration})'[img${i}_out]`,
        );
        currentLayer = `img${i}_out`;
      });
      // Process videos: trim to [inPoint, inPoint + duration), rebase PTS,
      // scale, then overlay on top of the image layers.
      videos.forEach((v, i) => {
        filters.push(`[${i}:v]trim=start=${v.inPoint || 0}:duration=${v.duration},setpts=PTS-STARTPTS[v${i}_trim]`);
        filters.push(`[v${i}_trim]scale=${Math.round(v.width)}:${Math.round(v.height)}[v${i}_scale]`);
        filters.push(
          `[${currentLayer}][v${i}_scale]overlay=${Math.round(v.x)}:${Math.round(v.y)}:enable='between(t,${v.startTime},${v.startTime + v.duration})'[v${i}_out]`,
        );
        currentLayer = `v${i}_out`;
      });
      const filterComplex = filters.join('; ');
      const args = [
        ...inputArgs,
        '-filter_complex',
        filterComplex,
        '-map',
        `[${currentLayer}]`,
        '-c:v',
        'libx264',
        '-pix_fmt',
        'yuv420p', // broadest player compatibility
        '-r',
        '30',
        '-t',
        totalDuration.toString(),
        'output.mp4',
      ];
      await ffmpeg.exec(args);
      const fileData = await ffmpeg.readFile('output.mp4');
      const data = new Uint8Array(fileData);
      const blob = new Blob([data.buffer], { type: 'video/mp4' });
      const url = URL.createObjectURL(blob);
      // Programmatic anchor click to trigger the download.
      const link = document.createElement('a');
      link.href = url;
      link.download = 'meme_video.mp4';
      link.click();
      URL.revokeObjectURL(url);
      ffmpeg.terminate();
    } catch (error) {
      console.error('Export error:', error);
    } finally {
      setIsExporting(false);
    }
  };

  // Expose controls to parent (will be used by buttons).
  // NOTE(review): this mutates a window-level global and is refreshed every
  // render (exportVideo is recreated per render); consider a context/ref API.
  useEffect(() => {
    window.timelineControls = {
      play: () => {
        if (!isPlaying) {
          setIsPlaying(true);
          startTimeRef.current = Date.now() / 1000;
          lastUpdateRef.current = 0;
        }
      },
      pause: () => {
        if (isPlaying) {
          setIsPlaying(false);
          pausedTimeRef.current = currentTime;
          Object.values(videoElements).forEach((video) => {
            if (!video.paused) {
              video.pause();
            }
            video.muted = true;
          });
          if (animationRef.current) {
            animationRef.current.stop();
          }
        }
      },
      reset: () => {
        setIsPlaying(false);
        setCurrentTime(0);
        pausedTimeRef.current = 0;
        Object.values(videoElements).forEach((video) => {
          video.pause();
          video.muted = true;
          video.currentTime = 0;
        });
      },
      export: exportVideo,
      isPlaying,
      isExporting,
    };
  }, [isPlaying, isExporting, currentTime, videoElements, exportVideo]);

  const activeElements = getActiveElements(currentTime);
  return (
    <div className="flex w-full justify-center">
      {/* Outer box reserves the scaled display footprint in the layout. */}
      <div
        style={{
          width: `${displayWidth}px`,
          height: `${displayHeight}px`,
        }}
      >
        {/* Inner box is the full-size canvas, visually shrunk via transform. */}
        <div
          className="origin-top-left overflow-hidden rounded-3xl border bg-black shadow-sm"
          style={{
            width: `${LAYOUT_CONSTANTS.CANVAS_WIDTH}px`,
            height: `${LAYOUT_CONSTANTS.CANVAS_HEIGHT}px`,
            transform: `scale(${scale})`,
          }}
        >
          <Stage width={LAYOUT_CONSTANTS.CANVAS_WIDTH} height={LAYOUT_CONSTANTS.CANVAS_HEIGHT}>
            <Layer ref={layerRef}>
              {activeElements.map((element) => {
                if (element.type === 'video' && videoElements[element.id]) {
                  return (
                    <Image
                      key={element.id}
                      image={videoElements[element.id]}
                      x={element.x}
                      y={element.y}
                      width={element.width}
                      height={element.height}
                    />
                  );
                } else if (element.type === 'image' && imageElements[element.id]) {
                  return (
                    <Image
                      key={element.id}
                      image={imageElements[element.id]}
                      x={element.x}
                      y={element.y}
                      width={element.width}
                      height={element.height}
                    />
                  );
                } else if (element.type === 'text') {
                  return (
                    <Text
                      key={element.id}
                      text={element.text}
                      x={element.x}
                      y={element.y}
                      fontSize={element.fontSize}
                      fill={element.fill}
                      stroke={element.stroke}
                      strokeWidth={element.strokeWidth}
                    />
                  );
                }
                return null;
              })}
            </Layer>
          </Stage>
        </div>
      </div>
    </div>
  );
};
export default EditorCanvas;