feat(ffmpeg): add ffmpeg-core.wasm for video processing capabilities
This commit is contained in:
103
composables/useFFmpeg.ts
Normal file
103
composables/useFFmpeg.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import { FFmpeg } from '@ffmpeg/ffmpeg'
|
||||
import { toBlobURL } from '@ffmpeg/util'
|
||||
|
||||
// Singleton FFmpeg instance, cached once load() has completed successfully.
let ffmpegInstance: FFmpeg | null = null
// In-flight initialization promise; lets concurrent callers share one load
// instead of spawning duplicate FFmpeg instances.
let loadPromise: Promise<FFmpeg> | null = null
|
||||
|
||||
/**
|
||||
* 获取或初始化 FFmpeg 实例(单例模式)
|
||||
*/
|
||||
export const useFFmpeg = async () => {
|
||||
// 如果已经加载过,直接返回
|
||||
if (ffmpegInstance && ffmpegInstance.loaded) {
|
||||
return ffmpegInstance
|
||||
}
|
||||
|
||||
// 如果正在加载中,等待加载完成
|
||||
if (loadPromise) {
|
||||
return loadPromise
|
||||
}
|
||||
|
||||
loadPromise = initializeFFmpeg()
|
||||
return loadPromise
|
||||
}
|
||||
|
||||
async function initializeFFmpeg(enableMT: boolean = false): Promise<FFmpeg> {
|
||||
try {
|
||||
const ffmpeg = new FFmpeg()
|
||||
|
||||
ffmpeg.on('log', ({ message, type }) => {
|
||||
console.log(`[ffmpeg - ${type}]`, message)
|
||||
})
|
||||
|
||||
ffmpeg.on('progress', ({ progress, time }) => {
|
||||
console.log(`[ffmpeg] P: ${(progress * 100).toFixed(2)}%, T: ${time}ms`)
|
||||
})
|
||||
|
||||
const baseURL = enableMT
|
||||
? 'https://cdn.jsdelivr.net/npm/@ffmpeg/core-mt@0.12.10/dist/esm'
|
||||
: 'https://cdn.jsdelivr.net/npm/@ffmpeg/core@0.12.10/dist/esm'
|
||||
|
||||
const coreURL = await toBlobURL(
|
||||
`${baseURL}/ffmpeg-core.js`,
|
||||
'text/javascript'
|
||||
)
|
||||
const wasmURL = await toBlobURL(
|
||||
`${baseURL}/ffmpeg-core.wasm`,
|
||||
'application/wasm'
|
||||
)
|
||||
|
||||
let loadPayload = {
|
||||
coreURL,
|
||||
wasmURL,
|
||||
}
|
||||
|
||||
if (enableMT) {
|
||||
const workerURL = await toBlobURL(
|
||||
`${baseURL}/ffmpeg-core.worker.js`,
|
||||
'text/javascript'
|
||||
)
|
||||
Object.assign(loadPayload, { workerURL })
|
||||
}
|
||||
|
||||
const isLoaded = await ffmpeg.load(loadPayload)
|
||||
console.log('[FFmpeg] FFmpeg 加载完成,isLoaded:', isLoaded)
|
||||
|
||||
ffmpegInstance = ffmpeg
|
||||
loadPromise = null
|
||||
return ffmpeg
|
||||
} catch (error) {
|
||||
console.error('[FFmpeg] 初始化失败:', error)
|
||||
loadPromise = null
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 清理 FFmpeg 资源
|
||||
*/
|
||||
export const cleanupFFmpeg = () => {
|
||||
if (ffmpegInstance && ffmpegInstance.loaded) {
|
||||
console.log('[FFmpeg] 清理 FFmpeg 资源...')
|
||||
ffmpegInstance.terminate()
|
||||
ffmpegInstance = null
|
||||
loadPromise = null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 将 Blob/File 转换为 Uint8Array
|
||||
*/
|
||||
export const fileToUint8Array = async (
|
||||
file: File | Blob
|
||||
): Promise<Uint8Array> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader()
|
||||
reader.onload = (e) => {
|
||||
const arrayBuffer = e.target?.result as ArrayBuffer
|
||||
resolve(new Uint8Array(arrayBuffer))
|
||||
}
|
||||
reader.onerror = reject
|
||||
reader.readAsArrayBuffer(file)
|
||||
})
|
||||
}
|
||||
6
composables/useVideoBackgroundCombinator.ts
Normal file
6
composables/useVideoBackgroundCombinator.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
/**
 * @deprecated Use useVideoBackgroundCompositing instead.
 * This file is kept only for backward compatibility with existing callers.
 */

export { useVideoBackgroundCompositing as useVideoBackgroundCombinator } from './useVideoBackgroundCompositing'
|
||||
166
composables/useVideoBackgroundCompositing.ts
Normal file
166
composables/useVideoBackgroundCompositing.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
import { fetchFile } from '@ffmpeg/util'
|
||||
import { useFFmpeg, fileToUint8Array } from './useFFmpeg'
|
||||
|
||||
/**
|
||||
* 获取图片的宽高信息
|
||||
*/
|
||||
const getImageDimensions = async (
|
||||
imageData: Uint8Array
|
||||
): Promise<{ width: number; height: number }> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const blob = new Blob([imageData], { type: 'image/png' })
|
||||
const url = URL.createObjectURL(blob)
|
||||
const img = new Image()
|
||||
|
||||
img.onload = () => {
|
||||
URL.revokeObjectURL(url)
|
||||
resolve({ width: img.width, height: img.height })
|
||||
}
|
||||
|
||||
img.onerror = () => {
|
||||
URL.revokeObjectURL(url)
|
||||
reject(new Error('Failed to load image'))
|
||||
}
|
||||
|
||||
img.src = url
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* 计算等比缩放到720P的尺寸
|
||||
* 720P 指高度为720,宽度按原宽高比计算
|
||||
*/
|
||||
const calculateScaledDimensions = (
|
||||
width: number,
|
||||
height: number
|
||||
): { width: number; height: number } => {
|
||||
const targetHeight = 720
|
||||
|
||||
// 如果原始高度小于等于720,保持原始尺寸
|
||||
if (height <= targetHeight) {
|
||||
return { width, height }
|
||||
}
|
||||
|
||||
// 计算缩放比例
|
||||
const scale = targetHeight / height
|
||||
const scaledWidth = Math.round(width * scale)
|
||||
|
||||
// 确保宽度为偶数(视频编码要求)
|
||||
const finalWidth = scaledWidth % 2 === 0 ? scaledWidth : scaledWidth - 1
|
||||
|
||||
return { width: finalWidth, height: targetHeight }
|
||||
}
|
||||
|
||||
/** Lifecycle phase reported while compositing a video with a background. */
export type CompositingPhase =
  | 'loading'
  | 'analyzing'
  | 'preparing'
  | 'executing'
  | 'finalizing'

/**
 * Callback invoked with the overall progress (0-100, per the values
 * emitted by useVideoBackgroundCompositing) and the current phase.
 */
export type CompositingProgressCallback = (info: {
  progress: number
  phase: CompositingPhase
}) => void
|
||||
|
||||
/**
|
||||
* 使用 FFmpeg WASM 将透明通道的视频与背景图片进行合成
|
||||
* @param videoUrl - WebM 视频 URL(带透明通道的数字人视频)
|
||||
* @param backgroundImage - 背景图片(File 对象或 URL 字符串)
|
||||
* @param options - 额外选项
|
||||
* @returns 合成后的视频 Blob
|
||||
*/
|
||||
export const useVideoBackgroundCompositing = async (
|
||||
videoUrl: string,
|
||||
backgroundImage: File | string,
|
||||
options?: {
|
||||
onProgress?: CompositingProgressCallback
|
||||
}
|
||||
) => {
|
||||
const ffmpeg = await useFFmpeg()
|
||||
const progressCallback = options?.onProgress
|
||||
|
||||
const videoFileName = 'input_video.webm'
|
||||
const backgroundFileName = 'background.png'
|
||||
const outputFileName = 'output.mp4'
|
||||
|
||||
try {
|
||||
progressCallback?.({ progress: 10, phase: 'loading' })
|
||||
const videoData = await fetchFile(videoUrl)
|
||||
const backgroundData = await fetchFile(backgroundImage)
|
||||
|
||||
progressCallback?.({ progress: 15, phase: 'analyzing' })
|
||||
const { width: bgWidth, height: bgHeight } = await getImageDimensions(
|
||||
backgroundData
|
||||
)
|
||||
console.log(
|
||||
`[Compositing] Background image dimensions: ${bgWidth}x${bgHeight}`
|
||||
)
|
||||
|
||||
const { width: outputWidth, height: outputHeight } =
|
||||
calculateScaledDimensions(bgWidth, bgHeight)
|
||||
console.log(
|
||||
`[Compositing] Output dimensions: ${outputWidth}x${outputHeight}`
|
||||
)
|
||||
|
||||
progressCallback?.({ progress: 20, phase: 'preparing' })
|
||||
|
||||
await ffmpeg.writeFile(videoFileName, videoData)
|
||||
await ffmpeg.writeFile(backgroundFileName, backgroundData)
|
||||
|
||||
progressCallback?.({ progress: 25, phase: 'preparing' })
|
||||
|
||||
// HACK: 不明原因导致首次执行合成时会报 memory access out of bounds 错误,先执行一次空命令能够规避
|
||||
await ffmpeg.exec(['-i', 'not-found'])
|
||||
|
||||
// 设置 progress 事件监听,映射 FFmpeg 进度到 30-95% 范围
|
||||
const executingProgressHandler = ({ progress }: { progress: number }) => {
|
||||
// progress 范围是 0-1,映射到 30-95
|
||||
const mappedProgress = Math.round(30 + progress * 65)
|
||||
progressCallback?.({ progress: mappedProgress, phase: 'executing' })
|
||||
}
|
||||
ffmpeg.on('progress', executingProgressHandler)
|
||||
|
||||
progressCallback?.({ progress: 30, phase: 'executing' })
|
||||
|
||||
// prettier-ignore
|
||||
const exitCode = await ffmpeg.exec([
|
||||
'-i', backgroundFileName,
|
||||
'-c:v', 'libvpx-vp9',
|
||||
'-i', videoFileName,
|
||||
'-filter_complex', 'overlay=(W-w)/2:H-h',
|
||||
'-c:v', 'libx264',
|
||||
outputFileName
|
||||
])
|
||||
|
||||
ffmpeg.off('progress', executingProgressHandler)
|
||||
|
||||
if (exitCode !== 0) {
|
||||
throw new Error(`FFmpeg command failed with exit code ${exitCode}`)
|
||||
}
|
||||
|
||||
progressCallback?.({ progress: 95, phase: 'finalizing' })
|
||||
|
||||
const outputData = await ffmpeg.readFile(outputFileName)
|
||||
let outputArray: Uint8Array
|
||||
if (outputData instanceof Uint8Array) {
|
||||
outputArray = outputData
|
||||
} else if (typeof outputData === 'string') {
|
||||
outputArray = new TextEncoder().encode(outputData)
|
||||
} else {
|
||||
outputArray = new Uint8Array(outputData as ArrayBufferLike)
|
||||
}
|
||||
const outputBlob = new Blob([outputArray], { type: 'video/mp4' })
|
||||
|
||||
progressCallback?.({ progress: 100, phase: 'finalizing' })
|
||||
|
||||
return outputBlob
|
||||
} catch (error) {
|
||||
console.error('Video compositing failed:', error)
|
||||
throw error
|
||||
} finally {
|
||||
await ffmpeg.deleteFile(videoFileName)
|
||||
await ffmpeg.deleteFile(backgroundFileName)
|
||||
await ffmpeg.deleteFile(outputFileName)
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user