ct
2025-06-14 19:20:21 +08:00
parent b3810f5ca4
commit 0f7fa96c40
2 changed files with 65 additions and 551 deletions


@@ -1,11 +1,4 @@
import useMediaStore from '@/stores/MediaStore';
import { FFmpeg } from '@ffmpeg/ffmpeg';
import { fetchFile, toBlobURL } from '@ffmpeg/util';
import 'konva/lib/Animation';
import 'konva/lib/shapes/Image';
import 'konva/lib/shapes/Text';
import React, { useCallback, useEffect, useLayoutEffect, useRef, useState } from 'react';
import { Image, Layer, Stage, Text } from 'react-konva/lib/ReactKonvaCore';
import React, { useEffect, useLayoutEffect, useState } from 'react';
import { LAYOUT_CONSTANTS, calculateResponsiveScale } from '../utils/layout-constants';

const useResponsiveCanvas = (maxWidth: number = 350) => {
@@ -20,16 +13,21 @@ const useResponsiveCanvas = (maxWidth: number = 350) => {
setScale(calculateResponsiveScale(maxWidth));
};
// Update immediately
handleResize();
// Event listeners
window.addEventListener('resize', handleResize);
window.addEventListener('orientationchange', handleResize);
// ResizeObserver for more reliable detection
let resizeObserver: ResizeObserver | undefined;
if (window.ResizeObserver) {
resizeObserver = new ResizeObserver(handleResize);
resizeObserver.observe(document.body);
}
// MutationObserver for dev tools detection
let mutationObserver: MutationObserver | undefined;
if (window.MutationObserver) {
mutationObserver = new MutationObserver(() => {
@@ -61,439 +59,14 @@ const EditorCanvas: React.FC<EditorCanvasProps> = ({ maxWidth = 350 }) => {
const displayWidth = LAYOUT_CONSTANTS.CANVAS_WIDTH * scale;
const displayHeight = LAYOUT_CONSTANTS.CANVAS_HEIGHT * scale;
const convertCoordinates = (e) => {
const rect = e.currentTarget.getBoundingClientRect();
return {
x: (e.clientX - rect.left) / scale,
y: (e.clientY - rect.top) / scale,
};
};
const { selectedBackground, selectedMeme } = useMediaStore();
// Timeline state (hidden from UI)
const [timelineElements, setTimelineElements] = useState([]);
const [currentTime, setCurrentTime] = useState(0);
const [isPlaying, setIsPlaying] = useState(false);
const [videoElements, setVideoElements] = useState({});
const [imageElements, setImageElements] = useState({});
const [loadedVideos, setLoadedVideos] = useState(new Set());
const [videoStates, setVideoStates] = useState({});
const [isExporting, setIsExporting] = useState(false);
const animationRef = useRef(null);
const layerRef = useRef(null);
const startTimeRef = useRef(0);
const pausedTimeRef = useRef(0);
const lastUpdateRef = useRef(0);
// Generate timeline elements from MediaStore selections
useEffect(() => {
const elements = [];
// Background (full duration)
if (selectedBackground) {
elements.push({
id: 'background',
type: 'image',
source: selectedBackground.media_url,
poster: selectedBackground.media_url,
name: 'Background',
startTime: 0,
layer: 1,
duration: 10, // Default 10 seconds
x: 0,
y: 0,
width: LAYOUT_CONSTANTS.CANVAS_WIDTH,
height: LAYOUT_CONSTANTS.CANVAS_HEIGHT,
});
}
// Meme overlay (shorter duration, centered)
if (selectedMeme) {
const memeWidth = LAYOUT_CONSTANTS.CANVAS_WIDTH * 0.8;
const memeHeight = LAYOUT_CONSTANTS.CANVAS_HEIGHT * 0.6;
const memeX = (LAYOUT_CONSTANTS.CANVAS_WIDTH - memeWidth) / 2;
const memeY = (LAYOUT_CONSTANTS.CANVAS_HEIGHT - memeHeight) / 2;
elements.push({
id: 'meme',
type: 'video',
source_webm: selectedMeme.webm_url,
source_mov: selectedMeme.mov_url,
poster: selectedMeme.webp_url,
name: 'Meme',
startTime: 0,
layer: 0,
inPoint: 0,
duration: 6,
x: memeX,
y: memeY,
width: memeWidth,
height: memeHeight,
});
}
setTimelineElements(elements);
}, [selectedBackground, selectedMeme]);
// Calculate total duration
const totalDuration = Math.max(...timelineElements.map((el) => el.startTime + el.duration), 1);
// Update video times
const updateVideoTimes = useCallback(
(time) => {
timelineElements.forEach((element) => {
if (element.type === 'video' && videoElements[element.id]) {
const video = videoElements[element.id];
const elementEndTime = element.startTime + element.duration;
if (time >= element.startTime && time < elementEndTime) {
const relativeTime = time - element.startTime;
const videoTime = (element.inPoint || 0) + relativeTime;
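// Only seek when the video is more than 0.1 s out of sync with the timeline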
if (Math.abs(video.currentTime - videoTime) > 0.1) {
video.currentTime = videoTime;
}
}
}
});
},
[timelineElements, videoElements],
);
// Create media elements when timeline changes
useEffect(() => {
const videoEls = {};
const imageEls = {};
timelineElements.forEach((element) => {
if (element.type === 'video') {
const video = document.createElement('video');
video.poster = element.poster;
video.crossOrigin = 'anonymous';
video.muted = true;
video.preload = 'auto';
video.playsInline = true;
video.controls = false;
video.loop = true;
if (element.source) {
video.src = element.source;
} else if (element.source_webm && element.source_mov) {
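// Append both WebM and MOV/MP4 <source> elements so the browser picks a supported format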
const sourceWebm = document.createElement('source');
sourceWebm.src = element.source_webm;
sourceWebm.type = 'video/webm';
const sourceMov = document.createElement('source');
sourceMov.src = element.source_mov;
sourceMov.type = 'video/mp4';
video.appendChild(sourceWebm);
video.appendChild(sourceMov);
}
// Capture element data to avoid variable shadowing
const elementId = element.id;
const elementData = { ...element };
video.addEventListener('loadedmetadata', () => {
// Set initial time position based on current timeline position
if (currentTime >= elementData.startTime && currentTime < elementData.startTime + elementData.duration) {
const relativeTime = currentTime - elementData.startTime;
const videoTime = (elementData.inPoint || 0) + relativeTime;
video.currentTime = videoTime;
}
setLoadedVideos((prev) => {
const newSet = new Set(prev);
newSet.add(elementId);
return newSet;
});
// Force a canvas redraw after video loads
if (layerRef.current) {
layerRef.current.batchDraw();
}
});
video.addEventListener('loadeddata', () => {
// Also trigger redraw when video data is loaded
if (layerRef.current) {
layerRef.current.batchDraw();
}
});
videoEls[element.id] = video;
} else if (element.type === 'image') {
const img = new window.Image();
img.crossOrigin = 'anonymous';
img.src = element.source;
img.addEventListener('load', () => {
setLoadedVideos((prev) => {
const newSet = new Set(prev);
newSet.add(element.id);
return newSet;
});
// Force a canvas redraw after image loads
if (layerRef.current) {
layerRef.current.batchDraw();
}
});
imageEls[element.id] = img;
}
});
setVideoElements(videoEls);
setImageElements(imageEls);
return () => {
Object.values(videoEls).forEach((video) => {
video.src = '';
video.load();
});
};
}, [timelineElements]);
// Update video times whenever currentTime changes (for paused state)
useEffect(() => {
if (!isPlaying) {
updateVideoTimes(currentTime);
}
}, [currentTime, updateVideoTimes, isPlaying]);
// Get active elements at current time
const getActiveElements = useCallback(
(time) => {
return timelineElements.filter((element) => {
const elementEndTime = element.startTime + element.duration;
return time >= element.startTime && time < elementEndTime;
});
},
[timelineElements],
);
// Manage video playback states
useEffect(() => {
if (!isPlaying) return;
timelineElements.forEach((element) => {
if (element.type === 'video' && videoElements[element.id]) {
const video = videoElements[element.id];
const elementEndTime = element.startTime + element.duration;
const shouldPlay = currentTime >= element.startTime && currentTime < elementEndTime;
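// Play and unmute clips whose timeline window contains the current time; pause and mute the rest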
if (shouldPlay && video.paused) {
video.muted = false;
video.play().catch(() => {});
} else if (!shouldPlay && !video.paused) {
video.pause();
video.muted = true;
}
}
});
}, [currentTime, isPlaying, timelineElements, videoElements]);
// Animation loop
const animate = useCallback(() => {
if (!isPlaying) return;
const now = Date.now() / 1000;
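// Timeline position = time accumulated before the last pause + wall-clock time since playback resumed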
const newTime = pausedTimeRef.current + (now - startTimeRef.current);
if (newTime >= totalDuration) {
setIsPlaying(false);
setCurrentTime(0);
pausedTimeRef.current = 0;
Object.values(videoElements).forEach((video) => {
video.pause();
video.muted = true;
});
return;
}
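// Throttle state updates and canvas redraws to every 50 ms (~20 fps)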
if (newTime - lastUpdateRef.current >= 0.05) {
lastUpdateRef.current = newTime;
setCurrentTime(newTime);
if (layerRef.current) {
layerRef.current.batchDraw();
}
}
}, [isPlaying, totalDuration, videoElements]);
useEffect(() => {
if (isPlaying) {
let animationId;
const animateFrame = () => {
animate();
animationId = requestAnimationFrame(animateFrame);
};
animationId = requestAnimationFrame(animateFrame);
animationRef.current = { stop: () => cancelAnimationFrame(animationId) };
return () => {
if (animationRef.current) {
animationRef.current.stop();
}
};
}
}, [isPlaying, animate]);
// Export function for Download button
const exportVideo = async () => {
if (isExporting) return;
setIsExporting(true);
try {
const ffmpeg = new FFmpeg();
const baseURL = 'https://unpkg.com/@ffmpeg/core@0.12.6/dist/esm';
const coreURL = `${baseURL}/ffmpeg-core.js`;
const wasmURL = `${baseURL}/ffmpeg-core.wasm`;
const coreBlobURL = await toBlobURL(coreURL, 'text/javascript');
const wasmBlobURL = await toBlobURL(wasmURL, 'application/wasm');
await ffmpeg.load({
coreURL: coreBlobURL,
wasmURL: wasmBlobURL,
});
// Download media files
const videos = timelineElements.filter((el) => el.type === 'video');
const images = timelineElements.filter((el) => el.type === 'image');
for (let i = 0; i < videos.length; i++) {
await ffmpeg.writeFile(`video${i}.webm`, await fetchFile(videos[i].source));
}
for (let i = 0; i < images.length; i++) {
await ffmpeg.writeFile(`image${i}.jpg`, await fetchFile(images[i].source));
}
// Generate FFmpeg command
let inputArgs = [];
let filters = [];
// Add inputs
videos.forEach((v, i) => {
inputArgs.push('-i', `video${i}.webm`);
});
images.forEach((img, i) => {
inputArgs.push('-i', `image${videos.length + i}.jpg`);
});
// Base canvas
filters.push(`color=black:size=${LAYOUT_CONSTANTS.CANVAS_WIDTH}x${LAYOUT_CONSTANTS.CANVAS_HEIGHT}:duration=${totalDuration}[base]`);
let currentLayer = 'base';
// Process images first (backgrounds)
images.forEach((img, i) => {
const inputIndex = videos.length + i;
filters.push(`[${inputIndex}:v]scale=${Math.round(img.width)}:${Math.round(img.height)}[img${i}_scale]`);
filters.push(
`[${currentLayer}][img${i}_scale]overlay=${Math.round(img.x)}:${Math.round(img.y)}:enable='between(t,${img.startTime},${img.startTime + img.duration})'[img${i}_out]`,
);
currentLayer = `img${i}_out`;
});
// Process videos
videos.forEach((v, i) => {
filters.push(`[${i}:v]trim=start=${v.inPoint || 0}:duration=${v.duration},setpts=PTS-STARTPTS[v${i}_trim]`);
filters.push(`[v${i}_trim]scale=${Math.round(v.width)}:${Math.round(v.height)}[v${i}_scale]`);
filters.push(
`[${currentLayer}][v${i}_scale]overlay=${Math.round(v.x)}:${Math.round(v.y)}:enable='between(t,${v.startTime},${v.startTime + v.duration})'[v${i}_out]`,
);
currentLayer = `v${i}_out`;
});
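// Combine the generated filter steps into a single filter_complex graph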
const filterComplex = filters.join('; ');
const args = [
...inputArgs,
'-filter_complex',
filterComplex,
'-map',
`[${currentLayer}]`,
'-c:v',
'libx264',
'-pix_fmt',
'yuv420p',
'-r',
'30',
'-t',
totalDuration.toString(),
'output.mp4',
];
await ffmpeg.exec(args);
const fileData = await ffmpeg.readFile('output.mp4');
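// Wrap the encoded MP4 bytes in a Blob and trigger a download via a temporary link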
const data = new Uint8Array(fileData);
const blob = new Blob([data.buffer], { type: 'video/mp4' });
const url = URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = url;
link.download = 'meme_video.mp4';
link.click();
URL.revokeObjectURL(url);
ffmpeg.terminate();
} catch (error) {
console.error('Export error:', error);
} finally {
setIsExporting(false);
}
};
// Expose controls to parent (will be used by buttons)
useEffect(() => {
window.timelineControls = {
play: () => {
if (!isPlaying) {
setIsPlaying(true);
startTimeRef.current = Date.now() / 1000;
lastUpdateRef.current = 0;
}
},
pause: () => {
if (isPlaying) {
setIsPlaying(false);
pausedTimeRef.current = currentTime;
Object.values(videoElements).forEach((video) => {
if (!video.paused) {
video.pause();
}
video.muted = true;
});
if (animationRef.current) {
animationRef.current.stop();
}
}
},
reset: () => {
setIsPlaying(false);
setCurrentTime(0);
pausedTimeRef.current = 0;
Object.values(videoElements).forEach((video) => {
video.pause();
video.muted = true;
video.currentTime = 0;
});
},
export: exportVideo,
isPlaying,
isExporting,
};
}, [isPlaying, isExporting, currentTime, videoElements, exportVideo]);
const activeElements = getActiveElements(currentTime);
return (
<div className="flex w-full justify-center">
<div
@@ -503,56 +76,20 @@ const EditorCanvas: React.FC<EditorCanvasProps> = ({ maxWidth = 350 }) => {
}}
>
<div
className="origin-top-left overflow-hidden rounded-3xl border bg-black shadow-sm"
className="origin-top-left rounded-3xl border bg-white shadow-sm dark:bg-black"
style={{
width: `${LAYOUT_CONSTANTS.CANVAS_WIDTH}px`,
height: `${LAYOUT_CONSTANTS.CANVAS_HEIGHT}px`,
transform: `scale(${scale})`,
}}
onClick={(e) => {
const { x, y } = convertCoordinates(e);
// Handle your canvas interactions here
// x, y are the actual canvas coordinates (0-720, 0-1280)
console.log(`Canvas coordinates: x=${x}, y=${y}`);
}}
>
{/* Your canvas content goes here */}
<Stage width={LAYOUT_CONSTANTS.CANVAS_WIDTH} height={LAYOUT_CONSTANTS.CANVAS_HEIGHT}>
<Layer ref={layerRef}>
{activeElements.map((element) => {
if (element.type === 'video' && videoElements[element.id]) {
return (
<Image
key={element.id}
image={videoElements[element.id]}
x={element.x}
y={element.y}
width={element.width}
height={element.height}
/>
);
} else if (element.type === 'image' && imageElements[element.id]) {
return (
<Image
key={element.id}
image={imageElements[element.id]}
x={element.x}
y={element.y}
width={element.width}
height={element.height}
/>
);
} else if (element.type === 'text') {
return (
<Text
key={element.id}
text={element.text}
x={element.x}
y={element.y}
fontSize={element.fontSize}
fill={element.fill}
stroke={element.stroke}
strokeWidth={element.strokeWidth}
/>
);
}
return null;
})}
</Layer>
</Stage>
</div>
</div>
</div>


@@ -1,80 +1,57 @@
'use client';

import { Button } from '@/components/ui/button';
import { cn } from '@/lib/utils';
import { Download, Edit3, Loader2, Pause, Play, Type } from 'lucide-react';
import { useEffect, useState } from 'react';

const EditorControls = ({ className = '', onEditClick = () => {}, isEditActive = false }) => {
  const [isPlaying, setIsPlaying] = useState(false);
  const [isExporting, setIsExporting] = useState(false);

  // Listen for timeline state changes
  useEffect(() => {
    const checkTimelineState = () => {
      if (window.timelineControls) {
        setIsPlaying(window.timelineControls.isPlaying);
        setIsExporting(window.timelineControls.isExporting);
      }
    };
    const interval = setInterval(checkTimelineState, 100);
    return () => clearInterval(interval);
  }, []);

  const handlePlayPause = () => {
    if (window.timelineControls) {
      if (isPlaying) {
        window.timelineControls.pause();
      } else {
        window.timelineControls.play();
      }
    }
  };

  const handleExport = () => {
    if (window.timelineControls && !isExporting) {
      window.timelineControls.export();
    }
  };

  return (
    <div className={cn('flex items-center justify-center gap-2', className)}>
      <Button
        variant="default"
        size="icon"
        className="h-12 w-12 rounded-full border shadow-sm"
        onClick={handlePlayPause}
        disabled={!window.timelineControls}
      >
        {isPlaying ? <Pause className="h-8 w-8" /> : <Play className="h-8 w-8" />}
      </Button>
      <Button variant="default" size="icon" className="h-12 w-12 rounded-full border shadow-sm">
        <Type className="h-8 w-8" />
      </Button>
      <Button
        id="edit"
        variant={isEditActive ? 'default' : 'default'}
        size="icon"
        className="h-12 w-12 rounded-full border shadow-sm"
        onClick={onEditClick}
      >
        <Edit3 className={`h-8 w-8 ${isEditActive ? 'text-white' : ''}`} />
      </Button>
      <Button
        variant="default"
        size="icon"
        className="h-12 w-12 rounded-full border shadow-sm"
        onClick={handleExport}
        disabled={isExporting || !window.timelineControls}
      >
        {isExporting ? <Loader2 className="h-8 w-8 animate-spin" /> : <Download className="h-8 w-8" />}
      </Button>
    </div>
  );
};

export default EditorControls;

"use client"

import { Button } from "@/components/ui/button"
import { cn } from "@/lib/utils"
import { Play, Type, Edit3, Download } from "lucide-react"

const EditorControls = ({ className = '', onEditClick = () => {}, isEditActive = false }) => {
  return (
    <div className={cn("flex items-center justify-center gap-2", className)}>
      <Button
        variant="ghost"
        size="icon"
        className="w-12 h-12 rounded-full shadow-sm border "
      >
        <Play className="h-8 w-8 " />
      </Button>

      {/* <Button
        variant="ghost"
        size="icon"
        className="w-12 h-12 rounded-full shadow-sm border "
      >
        <span className="text-sm font-medium ">9:16</span>
      </Button> */}

      <Button
        variant="ghost"
        size="icon"
        className="w-12 h-12 rounded-full shadow-sm border "
      >
        <Type className="h-8 w-8 " />
      </Button>

      <Button
        id="edit"
        variant={isEditActive ? "default" : "ghost"}
        size="icon"
        className="w-12 h-12 rounded-full shadow-sm border"
        onClick={onEditClick}
      >
        <Edit3 className={`h-8 w-8 ${isEditActive ? "text-white" : ""}`} />
      </Button>

      <Button
        variant="ghost"
        size="icon"
        className="w-12 h-12 rounded-full shadow-sm border "
      >
        <Download className="h-8 w-8 " />
      </Button>
    </div>
  )
}

export default EditorControls;