Commit fe1066583a by ct, 2025-06-28 12:54:30 +08:00 (parent f8dc4c01f2)
13 changed files with 781 additions and 175 deletions

View File

@@ -57,7 +57,7 @@ const sampleTimelineElements = [
startTime: 0,
layer: 2,
duration: 4,
x: 90,
x: 360, // Center horizontally (720/2)
y: 180,
fontSize: 40,
fontWeight: 'bold',
@@ -67,6 +67,9 @@ const sampleTimelineElements = [
stroke: '#000000',
strokeWidth: 3,
rotation: 0,
// Add text width properties for consistent rendering
fixedWidth: 576, // 80% of 720px canvas width
offsetX: 288, // Half of fixedWidth for center alignment
},
{
id: '5',
@@ -75,7 +78,7 @@ const sampleTimelineElements = [
startTime: 3,
layer: 3,
duration: 4,
x: 50,
x: 360, // Center horizontally (720/2)
y: 650,
fontSize: 20,
fontWeight: 'bold',
@@ -85,6 +88,9 @@ const sampleTimelineElements = [
stroke: '#ff0000',
strokeWidth: 2,
rotation: 0,
// Add text width properties for consistent rendering
fixedWidth: 576, // 80% of 720px canvas width
offsetX: 288, // Half of fixedWidth for center alignment
},
{
id: '6',

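For context on the new fixedWidth/offsetX values: Konva places a Text node at its x coordinate and then shifts the node's origin right by offsetX, so setting offsetX to half of the node's width makes x the horizontal centre of the text box. A minimal standalone sketch of that behaviour, assuming a 720px-wide stage (the container id, stage height and caption text are illustrative, not taken from the project):

import Konva from 'konva';

// Sketch only: a 576px-wide, centre-aligned text box whose origin is shifted
// by half its width, so x: 360 lands its centre on the middle of a 720px
// canvas, matching the sample data above.
const stage = new Konva.Stage({ container: 'preview', width: 720, height: 720 });
const layer = new Konva.Layer();
stage.add(layer);

layer.add(
  new Konva.Text({
    text: 'Sample caption', // illustrative
    x: 360,                 // canvas centre (720 / 2)
    y: 180,
    width: 576,             // fixedWidth: 80% of the 720px canvas
    offsetX: 288,           // half of fixedWidth
    align: 'center',
    fontSize: 40,
    fontStyle: 'bold',
    fill: '#ffffff',
  }),
);
layer.draw();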
View File

@@ -1,28 +1,10 @@
import { FFmpeg } from '@ffmpeg/ffmpeg';
import { fetchFile, toBlobURL } from '@ffmpeg/util';
import Konva from 'konva';
import { useCallback, useEffect, useMemo, useState } from 'react';
// Font configuration mapping
const FONT_CONFIG = {
Montserrat: {
normal: '/fonts/Montserrat/static/Montserrat-Regular.ttf',
bold: '/fonts/Montserrat/static/Montserrat-Bold.ttf',
italic: '/fonts/Montserrat/static/Montserrat-Italic.ttf',
boldItalic: '/fonts/Montserrat/static/Montserrat-BoldItalic.ttf',
},
Arial: {
normal: '/arial.ttf',
bold: '/arial.ttf',
italic: '/arial.ttf',
boldItalic: '/arial.ttf',
},
Bungee: {
normal: '/fonts/Bungee/Bungee-Regular.ttf',
bold: '/fonts/Bungee/Bungee-Regular.ttf',
italic: '/fonts/Bungee/Bungee-Regular.ttf',
boldItalic: '/fonts/Bungee/Bungee-Regular.ttf',
},
};
// Import centralized font management
import { getFontStyle, loadTimelineFonts, WATERMARK_CONFIG } from '@/modules/editor/fonts';
const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
const [showConsoleLogs] = useState(true);
@@ -31,25 +13,6 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
const [exportProgress, setExportProgress] = useState(0);
const [exportStatus, setExportStatus] = useState('');
// Helper function to get font file path based on font family and style
const getFontFilePath = (fontFamily, fontWeight, fontStyle) => {
const family = fontFamily || 'Arial';
const config = FONT_CONFIG[family] || FONT_CONFIG.Arial;
const isBold = fontWeight === 'bold' || fontWeight === 700;
const isItalic = fontStyle === 'italic';
if (isBold && isItalic) {
return config.boldItalic;
} else if (isBold) {
return config.bold;
} else if (isItalic) {
return config.italic;
} else {
return config.normal;
}
};
useEffect(() => {
console.log(JSON.stringify(timelineElements));
}, [timelineElements]);
@@ -82,24 +45,202 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
return arg;
};
// Better text escaping function for FFmpeg drawtext filter
const escapeTextForDrawtext = (text) => {
return text
.replace(/\\/g, '\\\\\\\\') // Escape backslashes - needs 4 backslashes for proper escaping
.replace(/'/g, "\\'") // Escape single quotes
.replace(/:/g, '\\:') // Escape colons
.replace(/\[/g, '\\[') // Escape square brackets
.replace(/\]/g, '\\]')
.replace(/,/g, '\\,') // Escape commas
.replace(/;/g, '\\;') // Escape semicolons
.replace(/\|/g, '\\|') // Escape pipes
.replace(/\n/g, ' ') // Replace newlines with spaces
.replace(/\r/g, ' ') // Replace carriage returns with spaces
.replace(/\t/g, ' '); // Replace tabs with spaces
// Ensure text element has proper width properties for consistent rendering
const ensureTextProperties = async (textElement, dimensions) => {
// If fixedWidth and offsetX are already set, use them
if (textElement.fixedWidth !== undefined && textElement.offsetX !== undefined) {
return textElement;
}
// Create a temporary stage to measure text dimensions
const tempStage = new Konva.Stage({
container: document.createElement('div'),
width: dimensions.width,
height: dimensions.height,
});
const tempLayer = new Konva.Layer();
tempStage.add(tempLayer);
// Create temporary text node to measure
const tempTextNode = new Konva.Text({
text: textElement.text,
fontSize: textElement.fontSize,
fontStyle: getFontStyle(textElement),
fontFamily: textElement.fontFamily || 'Montserrat',
align: 'center',
verticalAlign: 'middle',
wrap: 'word',
// Use a reasonable width for text wrapping (80% of canvas width)
width: dimensions.width * 0.8,
});
tempLayer.add(tempTextNode);
tempTextNode._setTextData();
// Get measured dimensions
const measuredWidth = tempTextNode.width();
const measuredTextWidth = tempTextNode.textWidth;
// Calculate offsetX for center alignment
const offsetX = measuredWidth / 2;
// Cleanup
tempStage.destroy();
// Return element with calculated properties
return {
...textElement,
fixedWidth: measuredWidth,
offsetX: offsetX,
};
};
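A hypothetical usage sketch of the helper above, just to show the measurement outcome (in the hook itself it is called per element from renderTextElementToImage):

// Sketch only; variable names are illustrative.
const textElements = timelineElements.filter((el) => el.type === 'text');
const normalized = await Promise.all(
  textElements.map((el) => ensureTextProperties(el, dimensions)),
);
// Because the temporary Konva.Text node is given an explicit wrapping width of
// dimensions.width * 0.8, a 720px-wide canvas yields fixedWidth = 576 and
// offsetX = 288, the same values hard-coded in the sample timeline data.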
// Debug function to compare preview vs export text properties
const debugTextProperties = (textElement) => {
if (showConsoleLogs) {
console.log('🔍 Text Element Properties for Export:');
console.log(' text:', textElement.text);
console.log(' x:', textElement.x);
console.log(' y:', textElement.y);
console.log(' fontSize:', textElement.fontSize);
console.log(' fontFamily:', textElement.fontFamily);
console.log(' fontWeight:', textElement.fontWeight);
console.log(' fontStyle:', textElement.fontStyle);
console.log(' width (fixedWidth):', textElement.fixedWidth);
console.log(' offsetX:', textElement.offsetX);
console.log(' rotation:', textElement.rotation);
console.log(' fill:', textElement.fill);
console.log(' stroke:', textElement.stroke);
console.log(' strokeWidth:', textElement.strokeWidth);
}
};
// Render text element to image using Konva
const renderTextElementToImage = async (textElement, dimensions) => {
showConsoleLogs && console.log(`🎨 Rendering text element: "${textElement.text.substring(0, 30)}..."`);
// Ensure text element has proper width properties
const processedTextElement = await ensureTextProperties(textElement, dimensions);
// Debug text properties
debugTextProperties(processedTextElement);
// Create offscreen stage with canvas dimensions
const stage = new Konva.Stage({
container: document.createElement('div'),
width: dimensions.width,
height: dimensions.height,
});
const layer = new Konva.Layer();
stage.add(layer);
// Wait a bit for fonts to be ready (same as preview)
await new Promise((resolve) => setTimeout(resolve, 50));
// Create text node with EXACT same properties as preview
const textNode = new Konva.Text({
text: processedTextElement.text,
x: processedTextElement.x,
y: processedTextElement.y,
fontSize: processedTextElement.fontSize,
fontStyle: getFontStyle(processedTextElement),
fontFamily: processedTextElement.fontFamily || 'Montserrat',
fill: processedTextElement.fill || '#ffffff',
stroke: processedTextElement.strokeWidth > 0 ? processedTextElement.stroke || '#000000' : undefined,
strokeWidth: processedTextElement.strokeWidth * 3 || 0,
fillAfterStrokeEnabled: true,
strokeScaleEnabled: false,
rotation: processedTextElement.rotation || 0,
// EXACT same alignment as preview
align: 'center',
verticalAlign: 'middle',
wrap: 'word',
// EXACT same scaling as preview
scaleX: 1,
scaleY: 1,
// EXACT same width/offset as preview
width: processedTextElement.fixedWidth,
offsetX: processedTextElement.offsetX,
});
layer.add(textNode);
// Force text measurement like in preview
textNode._setTextData();
layer.draw();
// Wait for rendering to complete
await new Promise((resolve) => setTimeout(resolve, 100));
// Log text dimensions for debugging
showConsoleLogs &&
console.log(
`📏 Export text dimensions: width=${textNode.width()}, height=${textNode.height()}, textWidth=${textNode.textWidth}, textHeight=${textNode.textHeight}`,
);
showConsoleLogs &&
console.log(`📏 Element properties: fixedWidth=${processedTextElement.fixedWidth}, offsetX=${processedTextElement.offsetX}`);
// Convert to image with same resolution as canvas
const dataURL = stage.toDataURL({
mimeType: 'image/png',
quality: 1.0,
pixelRatio: 1, // FIXED: Use 1:1 ratio to match canvas resolution
});
// Cleanup
stage.destroy();
showConsoleLogs && console.log(`✅ Text element rendered to image`);
return dataURL;
};
// Render watermark to image using Konva
const renderWatermarkToImage = async (dimensions) => {
showConsoleLogs && console.log(`🏷️ Rendering watermark`);
const stage = new Konva.Stage({
container: document.createElement('div'),
width: dimensions.width,
height: dimensions.height,
});
const layer = new Konva.Layer();
stage.add(layer);
const watermarkText = new Konva.Text({
text: 'MEMEAIGEN.COM',
x: dimensions.width / 2,
y: dimensions.height / 2 + dimensions.height * 0.2,
fontSize: WATERMARK_CONFIG.fontSize,
fontFamily: WATERMARK_CONFIG.fontFamily,
fill: WATERMARK_CONFIG.fill,
stroke: WATERMARK_CONFIG.stroke,
strokeWidth: WATERMARK_CONFIG.strokeWidth,
opacity: WATERMARK_CONFIG.opacity,
align: 'center',
verticalAlign: 'middle',
offsetX: 90, // Approximate half-width to center the text
offsetY: 5, // Approximate half-height to center the text
});
layer.add(watermarkText);
layer.draw();
const dataURL = stage.toDataURL({
mimeType: 'image/png',
quality: 1.0,
pixelRatio: 1, // FIXED: Match canvas resolution
});
stage.destroy();
showConsoleLogs && console.log(`✅ Watermark rendered to image`);
return dataURL;
};
const generateFFmpegCommand = useCallback(
(is_string = true, useLocalFiles = false) => {
(is_string = true, useLocalFiles = false, textImages = {}, watermarkFileName = null) => {
showConsoleLogs && console.log('🎬 STARTING FFmpeg generation');
showConsoleLogs && console.log(`📐 Canvas size: ${dimensions.width}x${dimensions.height}, Duration: ${totalDuration}s`);
@@ -163,6 +304,20 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
inputIndex++;
});
// Add text image inputs
texts.forEach((text, i) => {
inputArgs.push('-loop', '1', '-t', totalDuration.toString(), '-i');
inputArgs.push(useLocalFiles ? `text_${i}.png` : textImages[text.id]?.fileName || `text_${i}.png`);
inputIndex++;
});
// Add watermark input if exists
if (watermarkFileName) {
inputArgs.push('-loop', '1', '-t', totalDuration.toString(), '-i');
inputArgs.push(watermarkFileName);
inputIndex++;
}
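To make the stream numbering concrete, here is a hypothetical input layout for a project with two videos, one image, the two sample captions, and a watermark (video file names are elided in this diff, so they are shown generically):

// Hypothetical input order (2 videos, 1 image, 2 text PNGs, 1 watermark):
//   0:v, 1:v  -> the two video inputs (names elided in this diff)
//   2:v       -> input_image_0.jpg
//   3:v, 4:v  -> text_0.png, text_1.png  (looped for totalDuration)
//   5:v       -> watermark.png           (looped for totalDuration)
// These positions are what videoInputIndex, imageInputIndex and textInputIndex
// resolve to when the filter graph below is built.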
let filters = [];
filters.push(`color=black:size=${dimensions.width}x${dimensions.height}:duration=${totalDuration}[base]`);
@@ -181,9 +336,10 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
allVisualElements.map((el) => `${el.elementType}${el.originalIndex}(L${el.layer || 0})`).join(' → '),
);
// Track input indices for videos and images
// Track input indices for videos, images, and text images
let videoInputIndex = 0;
let imageInputIndex = videos.length; // Images start after videos
let textInputIndex = videos.length + images.length; // Text images start after regular images
// Process elements in layer order
allVisualElements.forEach((element, processingIndex) => {
@@ -261,41 +417,24 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
const i = element.originalIndex;
showConsoleLogs &&
console.log(`📝 Text ${i} (Layer ${t.layer || 0}) - Position: (${t.x}, ${t.y}) Text: "${t.text.substring(0, 30)}..."`);
console.log(`📝 Text ${i} (Layer ${t.layer || 0}) - Text: "${t.text.substring(0, 30)}..." - Using Konva-rendered image`);
// Better text escaping for FFmpeg
const escapedText = escapeTextForDrawtext(t.text);
// Use overlay filter for Konva-rendered text image
filters.push(
`[${videoLayer}][${textInputIndex}:v]overlay=0:0:enable='between(t,${t.startTime},${t.startTime + t.duration})'[t${i}_out]`,
);
// Get the appropriate font file path
const fontFilePath = getFontFilePath(t.fontFamily, t.fontWeight, t.fontStyle);
const fontFileName = fontFilePath.split('/').pop();
// Center the text: x position is the center point, y is adjusted for baseline
const centerY = Math.round(t.y + t.fontSize * 0.3); // Adjust for text baseline
// Format colors for FFmpeg
const fontColor = formatColorForFFmpeg(t.fill);
const borderColor = formatColorForFFmpeg(t.stroke);
const borderWidth = Math.max(0, t.strokeWidth || 0); // Ensure non-negative
// Build drawtext filter with proper border handling
// FIXED: Add the enable parameter without quotes to avoid truncation
let drawTextFilter = `[${videoLayer}]drawtext=fontfile=/${fontFileName}:text='${escapedText}':x=(w-tw)/2:y=${centerY}:fontsize=${t.fontSize}:fontcolor=${fontColor}`;
// Only add border if strokeWidth > 0
if (borderWidth > 0) {
drawTextFilter += `:borderw=${borderWidth}:bordercolor=${borderColor}`;
}
// FIXED: Don't wrap enable parameter in quotes - this was causing the truncation
drawTextFilter += `:enable=between(t\\,${t.startTime}\\,${t.startTime + t.duration})[t${i}]`;
showConsoleLogs && console.log(`Text filter ${i}:`, drawTextFilter);
filters.push(drawTextFilter);
videoLayer = `t${i}`;
videoLayer = `t${i}_out`;
textInputIndex++;
}
});
// Add watermark overlay if exists
if (watermarkFileName) {
filters.push(`[${videoLayer}][${textInputIndex}:v]overlay=0:0[watermark_out]`);
videoLayer = 'watermark_out';
}
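With the same hypothetical layout (two videos, one image, the two sample captions, a watermark), the text and watermark stage of the filter graph would be pushed roughly as follows; [prev] stands for whatever label the preceding video/image overlays left in videoLayer:

// Sketch of the generated overlay filters, using the input indices from above:
filters.push("[prev][3:v]overlay=0:0:enable='between(t,0,4)'[t0_out]");
filters.push("[t0_out][4:v]overlay=0:0:enable='between(t,3,7)'[t1_out]");
filters.push('[t1_out][5:v]overlay=0:0[watermark_out]');
// videoLayer ends up as 'watermark_out', which is what the final -map uses.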
showConsoleLogs && console.log('🎵 PROCESSING AUDIO FOR', videos.length, 'VIDEOS');
let audioOutputs = [];
@@ -358,6 +497,14 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
inputStrings.push(`-loop 1 -t ${img.duration} -i ${escapeShellArg(useLocalFiles ? `input_image_${i}.jpg` : img.source)}`);
});
texts.forEach((text, i) => {
inputStrings.push(`-loop 1 -t ${totalDuration} -i ${escapeShellArg(useLocalFiles ? `text_${i}.png` : text.id)}`);
});
if (watermarkFileName) {
inputStrings.push(`-loop 1 -t ${totalDuration} -i ${escapeShellArg(watermarkFileName)}`);
}
const inputs = inputStrings.join(' ');
const audioMap = audioArgs.length > 0 ? ` ${audioArgs.map((arg) => escapeShellArg(arg)).join(' ')}` : '';
const command = `ffmpeg -y -c:v libvpx-vp9 ${inputs} -filter_complex ${escapeShellArg(filterComplex)} -map ${escapeShellArg(`[${videoLayer}]`)}${audioMap} -c:a aac -r 30 -t ${totalDuration} -vcodec libx264 output.mp4`;
@@ -380,7 +527,7 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
);
const ffmpegCommand = useMemo(() => {
return generateFFmpegCommand(true, false);
return generateFFmpegCommand(true, false, {}, null);
}, [generateFFmpegCommand]);
const copyFFmpegCommand = useCallback(() => {
@@ -407,7 +554,6 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
showConsoleLogs && console.log('FFmpeg Log:', message);
});
//const baseURL = 'https://unpkg.com/@ffmpeg/core@0.12.10/dist/esm';
const baseURL = window.location.origin + '/ffmpeg_packages/core/dist/esm';
const coreURL = `${baseURL}/ffmpeg-core.js`;
const wasmURL = `${baseURL}/ffmpeg-core.wasm`;
@@ -429,45 +575,40 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
setExportProgress(10);
setExportStatus('Loading fonts...');
await loadTimelineFonts(timelineElements);
showConsoleLogs && console.log('✅ All fonts loaded and ready');
setExportProgress(15);
// Collect all fonts that need to be loaded with their correct paths
const fontsToLoad = new Map(); // Map from filename to full path
setExportStatus('Rendering text elements...');
// Add Arial font (fallback)
fontsToLoad.set('arial.ttf', 'https://raw.githubusercontent.com/ffmpegwasm/testdata/master/arial.ttf');
// Render text elements to images
const texts = timelineElements.filter((el) => el.type === 'text');
const textImages = {};
// Add fonts used by text elements
timelineElements
.filter((el) => el.type === 'text')
.forEach((text) => {
const fontFilePath = getFontFilePath(text.fontFamily, text.fontWeight, text.fontStyle);
const fontFileName = fontFilePath.split('/').pop();
for (let i = 0; i < texts.length; i++) {
const textElement = texts[i];
const dataURL = await renderTextElementToImage(textElement, dimensions);
const imageData = await fetchFile(dataURL);
// Only add if not already in map and not arial.ttf
if (fontFileName !== 'arial.ttf' && !fontsToLoad.has(fontFileName)) {
fontsToLoad.set(fontFileName, fontFilePath);
}
});
const fileName = `text_${i}.png`;
await ffmpeg.writeFile(fileName, imageData);
textImages[textElement.id] = { fileName, index: i };
showConsoleLogs && console.log('Fonts to load:', Array.from(fontsToLoad.entries()));
// Load each unique font
let fontProgress = 0;
for (const [fontFileName, fontPath] of fontsToLoad) {
try {
showConsoleLogs && console.log(`Loading font: ${fontFileName} from ${fontPath}`);
await ffmpeg.writeFile(fontFileName, await fetchFile(fontPath));
showConsoleLogs && console.log(`✓ Font ${fontFileName} loaded successfully`);
fontProgress++;
setExportProgress(10 + Math.round((fontProgress / fontsToLoad.size) * 10));
} catch (error) {
console.error(`❌ Failed to load font ${fontFileName} from ${fontPath}:`, error);
// If font loading fails, we'll use arial.ttf as fallback
}
setExportProgress(15 + Math.round((i / texts.length) * 15));
}
showConsoleLogs && console.log('All fonts loaded!');
setExportProgress(20);
// Render watermark if needed (requires the watermarked prop to be passed into this hook)
let watermarkFileName = null;
// Uncomment once the watermarked prop is available here:
// if (watermarked) {
// const watermarkDataURL = await renderWatermarkToImage(dimensions);
// const watermarkImageData = await fetchFile(watermarkDataURL);
// watermarkFileName = 'watermark.png';
// await ffmpeg.writeFile(watermarkFileName, watermarkImageData);
// }
setExportProgress(30);
showConsoleLogs && console.log('✅ All text elements rendered to images');
setExportStatus('Downloading media...');
const videos = timelineElements.filter((el) => el.type === 'video');
@@ -489,7 +630,7 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
throw new Error(`Failed to download video ${i}: ${error.message}`);
}
mediaProgress++;
setExportProgress(20 + Math.round((mediaProgress / totalMedia) * 40));
setExportProgress(30 + Math.round((mediaProgress / totalMedia) * 40));
}
// Download images
@@ -503,7 +644,7 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
throw new Error(`Failed to download image ${i}: ${error.message}`);
}
mediaProgress++;
setExportProgress(20 + Math.round((mediaProgress / totalMedia) * 40));
setExportProgress(30 + Math.round((mediaProgress / totalMedia) * 40));
}
showConsoleLogs && console.log('All media downloaded successfully!');
@@ -517,11 +658,10 @@ const useVideoExport = ({ timelineElements, dimensions, totalDuration }) => {
}
setExportStatus('Processing video...');
let args = generateFFmpegCommand(false, true);
let args = generateFFmpegCommand(false, true, textImages, watermarkFileName);
showConsoleLogs && console.log('Generated FFmpeg arguments:', args);
showConsoleLogs && console.log(generateFFmpegCommand(true, true));
showConsoleLogs && console.log(generateFFmpegCommand(true, true, textImages, watermarkFileName));
setExportProgress(70);

View File

@@ -2,14 +2,17 @@ import { Button } from '@/components/ui/button';
import { useMitt } from '@/plugins/MittContext';
import useVideoEditorStore from '@/stores/VideoEditorStore';
import { Type } from 'lucide-react';
import { useEffect, useRef } from 'react';
import { useEffect, useRef, useState } from 'react';
import { Group, Image, Layer, Line, Stage, Text, Transformer } from 'react-konva';
import { Html } from 'react-konva-utils';
// Import our custom hooks and utilities
import { useElementSelection } from './video-preview/video-preview-element-selection';
import { useElementTransform } from './video-preview/video-preview-element-transform';
import { getImageSource, getTextFontStyle } from './video-preview/video-preview-utils';
import { getImageSource } from './video-preview/video-preview-utils';
// Import centralized font management
import { getFontStyle, loadTimelineFonts, WATERMARK_CONFIG } from '@/modules/editor/fonts';
const VideoPreview = ({
watermarked,
@@ -56,6 +59,51 @@ const VideoPreview = ({
const stageRef = useRef(null);
const elementRefs = useRef({});
// Font loading state
const [fontsLoaded, setFontsLoaded] = useState(false);
const [fontLoadingAttempted, setFontLoadingAttempted] = useState(false);
// Load fonts when timeline elements change
useEffect(() => {
const loadFonts = async () => {
if (timelineElements.length > 0 && !fontLoadingAttempted) {
setFontLoadingAttempted(true);
try {
await loadTimelineFonts(timelineElements);
setFontsLoaded(true);
console.log('✅ Fonts loaded in preview');
// Force redraw after fonts load to recalculate text dimensions
setTimeout(() => {
if (layerRef.current) {
layerRef.current.batchDraw();
}
}, 100);
} catch (error) {
console.warn('⚠️ Font loading failed:', error);
setFontsLoaded(true); // Continue anyway with fallback fonts
}
}
};
loadFonts();
}, [timelineElements, fontLoadingAttempted]);
// Force text remeasurement when fonts load
useEffect(() => {
if (fontsLoaded && layerRef.current) {
// Find all text nodes and force them to recalculate
const textNodes = layerRef.current.find('Text');
textNodes.forEach((textNode) => {
// Force Konva to recalculate text dimensions
textNode._setTextData();
textNode.cache();
textNode.clearCache();
});
layerRef.current.batchDraw();
}
}, [fontsLoaded]);
// Use our custom hooks
const {
selectedElementId,
@@ -140,19 +188,44 @@ const VideoPreview = ({
);
} else if (element.type === 'text') {
return (
<Group key={element.id}>
<Group key={`${element.id}-${fontsLoaded}`}>
<Text
ref={(node) => {
if (node) {
elementRefs.current[element.id] = node;
// Force text measurement after font loading
if (fontsLoaded) {
setTimeout(() => {
node._setTextData();
// Debug log preview text properties
console.log(`🔍 Preview Text ${element.id} Properties:`);
console.log(' text:', element.text);
console.log(' x:', element.x);
console.log(' y:', element.y);
console.log(' fontSize:', element.fontSize);
console.log(' fontFamily:', element.fontFamily);
console.log(' width (fixedWidth):', element.fixedWidth);
console.log(' offsetX:', element.offsetX);
console.log(' node.width():', node.width());
console.log(' node.height():', node.height());
console.log(' node.textWidth:', node.textWidth);
console.log(' node.textHeight:', node.textHeight);
if (layerRef.current) {
layerRef.current.batchDraw();
}
}, 0);
}
}
}}
text={element.text}
x={element.x}
y={element.y}
fontSize={element.fontSize}
fontStyle={getTextFontStyle(element)}
fontFamily={element.fontFamily || 'Arial'}
fontStyle={getFontStyle(element)} // Use centralized function
fontFamily={element.fontFamily || 'Montserrat'}
fill={element.fill || '#ffffff'}
stroke={element.strokeWidth > 0 ? element.stroke || '#000000' : undefined}
strokeWidth={element.strokeWidth * 3 || 0}
@@ -246,15 +319,16 @@ const VideoPreview = ({
{/* Watermark - only show when watermarked is true */}
{watermarked && (
<Text
key={`watermark-${fontsLoaded}`}
text="MEMEAIGEN.COM"
x={dimensions.width / 2}
y={dimensions.height / 2 + dimensions.height * 0.2}
fontSize={20}
fontFamily="Bungee"
fill="white"
stroke="black"
strokeWidth={2}
opacity={0.5}
fontSize={WATERMARK_CONFIG.fontSize}
fontFamily={WATERMARK_CONFIG.fontFamily}
fill={WATERMARK_CONFIG.fill}
stroke={WATERMARK_CONFIG.stroke}
strokeWidth={WATERMARK_CONFIG.strokeWidth}
opacity={WATERMARK_CONFIG.opacity}
align="center"
verticalAlign="middle"
offsetX={90} // Approximate half-width to center the text

View File

@@ -1,5 +1,8 @@
// video-preview-utils.js
// Import centralized font management
import { getFontStyle } from '@/modules/editor/fonts';
// Snap settings
export const POSITION_SNAP_THRESHOLD = 10; // Pixels within which to snap to center
@@ -16,21 +19,8 @@ export const getImageSource = (element, videoStates, isPlaying) => {
return null;
};
// Helper function to get font style for text elements
export const getTextFontStyle = (element) => {
const isBold = element.fontWeight === 'bold' || element.fontWeight === 700;
const isItalic = element.fontStyle === 'italic';
if (isBold && isItalic) {
return 'bold italic';
} else if (isBold) {
return 'bold';
} else if (isItalic) {
return 'italic';
} else {
return 'normal';
}
};
// Re-export the centralized font style function for backward compatibility
export { getFontStyle as getTextFontStyle };
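The centralized module at @/modules/editor/fonts is not included in this diff. A minimal sketch of getFontStyle consistent with the removed helper above (loadTimelineFonts and WATERMARK_CONFIG are not reconstructed here):

// Sketch only: mirrors the bold/italic mapping of the removed getTextFontStyle.
export const getFontStyle = (element) => {
  const isBold = element.fontWeight === 'bold' || element.fontWeight === 700;
  const isItalic = element.fontStyle === 'italic';
  if (isBold && isItalic) return 'bold italic';
  if (isBold) return 'bold';
  if (isItalic) return 'italic';
  return 'normal';
};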
// Check if element uses center-offset positioning
export const usesCenterPositioning = (elementType) => {