refactor(deps): migrate to nuxt v4

This commit is contained in:
2026-02-10 00:31:04 +08:00
parent f1b9cea060
commit 880b85f75d
88 changed files with 80 additions and 60 deletions

View File

@@ -0,0 +1,30 @@
export const fetchCourseSubtitleUrl = async (
course: resp.gen.CourseGenItem
) => {
const loginState = useLoginState()
try {
const subtitleRecord = await useFetchWrapped<
{
page?: number
perpage?: number
task_id: string
} & AuthedRequest,
BaseResponse<PagedData<resp.gen.CourseSubtitleCreate>>
>('App.Digital_VideoSubtitle.GetList', {
token: loginState.token!,
user_id: loginState.user.id,
task_id: course.task_id,
page: 1,
perpage: 1,
})
if (subtitleRecord.data.items.length !== 1) {
return course.subtitle_url
}
return subtitleRecord.data.items[0].url
} catch (err) {
return course.subtitle_url
}
}

View File

@@ -0,0 +1,20 @@
/**
 * Convert a base64 data URL (e.g. "data:image/png;base64,....") into an
 * object URL backed by a Blob, preserving the declared MIME type.
 *
 * @throws Error when the input is not a well-formed base64 data URL.
 */
export const useBlobUrlFromB64 = (dataurl: string): string => {
  const [header, ...rest] = dataurl.split(',')
  if (rest.length === 0) {
    throw new Error('dataurl is not a valid base64 image')
  }
  // Pull the MIME type out of the "data:<mime>;base64" header.
  const mimeMatch = header.match(/:(.*?);/)
  if (mimeMatch === null) {
    throw new Error('dataurl is not a valid base64 image')
  }
  const decoded = atob(rest[0])
  const bytes = new Uint8Array(decoded.length)
  for (let i = 0; i < decoded.length; i++) {
    bytes[i] = decoded.charCodeAt(i)
  }
  return URL.createObjectURL(new Blob([bytes], { type: mimeMatch[1] }))
}

View File

@@ -0,0 +1,22 @@
/**
 * Progressive-rendering helper: counts animation frames up to `maxFrame`
 * and returns a predicate telling whether frame `n` has been reached,
 * letting heavy components defer their mount across frames.
 *
 * @param maxFrame - frame count at which the counter stops advancing.
 */
export const useDefer = (maxFrame: number = 1000) => {
  const frame = ref(1)
  let rafId: number
  const tick = () => {
    rafId = requestAnimationFrame(() => {
      frame.value++
      // Stop re-scheduling once the cap has been passed.
      if (frame.value > maxFrame) return
      tick()
    })
  }
  onMounted(tick)
  onUnmounted(() => cancelAnimationFrame(rafId))
  return (n: number) => frame.value >= n
}

View File

@@ -0,0 +1,49 @@
import { EventEmitter } from 'events'
/**
 * Create a downloader for `url` that saves the response as `filename`.
 *
 * Returns the `download` trigger plus an EventEmitter that reports:
 *  - 'progress' (percent, 0-100) while the transfer runs,
 *  - 'done' once the file has been handed off to the browser,
 *  - 'error' (Error) on HTTP failure or network error.
 */
export const useDownload = (
  url: string,
  filename: string
): {
  download: () => void
  progressEmitter: EventEmitter
} => {
  const progressEmitter = new EventEmitter()
  const download = () => {
    const xhr = new XMLHttpRequest()
    xhr.open('GET', url, true)
    xhr.responseType = 'blob'
    xhr.onprogress = (event) => {
      if (event.lengthComputable) {
        const percentComplete = (event.loaded / event.total) * 100
        progressEmitter.emit('progress', percentComplete)
      }
    }
    xhr.onload = function () {
      if (this.status === 200) {
        const blob = new Blob([this.response], {
          type: 'application/octet-stream',
        })
        // Renamed from `url` — the original shadowed the function parameter.
        const blobUrl = window.URL.createObjectURL(blob)
        const link = document.createElement('a')
        link.href = blobUrl
        link.setAttribute('download', filename)
        document.body.appendChild(link)
        link.click()
        link.parentNode?.removeChild(link)
        // Fix: release the object URL; the original leaked one per download.
        window.URL.revokeObjectURL(blobUrl)
        progressEmitter.emit('done')
      } else {
        progressEmitter.emit('error', new Error('资源已过期或不存在'))
      }
    }
    xhr.onerror = function () {
      progressEmitter.emit('error', new Error('网络错误,下载失败'))
    }
    xhr.send()
  }
  return {
    download,
    progressEmitter,
  }
}

View File

@@ -0,0 +1,103 @@
import { FFmpeg } from '@ffmpeg/ffmpeg'
import { toBlobURL } from '@ffmpeg/util'
let ffmpegInstance: FFmpeg | null = null
let loadPromise: Promise<FFmpeg> | null = null
/**
* 获取或初始化 FFmpeg 实例(单例模式)
*/
/**
 * Get the shared FFmpeg instance, initializing it on first use (singleton).
 * Concurrent callers during initialization all await the same in-flight
 * promise rather than spawning extra loads.
 */
export const useFFmpeg = async () => {
  // Fast path: instance exists and its core is already loaded.
  if (ffmpegInstance?.loaded) {
    return ffmpegInstance
  }
  // An initialization is already in flight — share its result.
  if (loadPromise) {
    return loadPromise
  }
  loadPromise = initializeFFmpeg()
  return loadPromise
}
/**
 * Build and load a fresh FFmpeg instance.
 *
 * Core assets are fetched from the jsDelivr CDN and converted to blob URLs
 * via toBlobURL before loading.
 *
 * @param enableMT - load the multi-threaded core (requires a worker script).
 * @throws re-throws any load failure after clearing the pending promise.
 */
async function initializeFFmpeg(enableMT: boolean = false): Promise<FFmpeg> {
  try {
    const ffmpeg = new FFmpeg()
    ffmpeg.on('log', ({ message, type }) => {
      console.log(`[ffmpeg - ${type}]`, message)
    })
    ffmpeg.on('progress', ({ progress, time }) => {
      console.log(`[ffmpeg] P: ${(progress * 100).toFixed(2)}%, T: ${time}ms`)
    })
    const baseURL = enableMT
      ? 'https://cdn.jsdelivr.net/npm/@ffmpeg/core-mt@0.12.10/dist/esm'
      : 'https://cdn.jsdelivr.net/npm/@ffmpeg/core@0.12.10/dist/esm'
    const coreURL = await toBlobURL(
      `${baseURL}/ffmpeg-core.js`,
      'text/javascript'
    )
    const wasmURL = await toBlobURL(
      `${baseURL}/ffmpeg-core.wasm`,
      'application/wasm'
    )
    // Typed payload with an optional worker slot instead of Object.assign.
    const loadPayload: {
      coreURL: string
      wasmURL: string
      workerURL?: string
    } = { coreURL, wasmURL }
    if (enableMT) {
      loadPayload.workerURL = await toBlobURL(
        `${baseURL}/ffmpeg-core.worker.js`,
        'text/javascript'
      )
    }
    const isLoaded = await ffmpeg.load(loadPayload)
    console.log('[FFmpeg] FFmpeg 加载完成isLoaded:', isLoaded)
    ffmpegInstance = ffmpeg
    loadPromise = null
    return ffmpeg
  } catch (error) {
    console.error('[FFmpeg] 初始化失败:', error)
    // Clear the pending promise so a later call can retry initialization.
    loadPromise = null
    throw error
  }
}
/**
* 清理 FFmpeg 资源
*/
/**
 * Terminate the shared FFmpeg instance (if loaded) and reset the singleton
 * so the next useFFmpeg() call performs a fresh initialization.
 */
export const cleanupFFmpeg = () => {
  if (!ffmpegInstance || !ffmpegInstance.loaded) return
  console.log('[FFmpeg] 清理 FFmpeg 资源...')
  ffmpegInstance.terminate()
  ffmpegInstance = null
  loadPromise = null
}
/**
* 将 Blob/File 转换为 Uint8Array
*/
/**
 * Read a File/Blob fully into memory as a Uint8Array.
 *
 * Uses the standard Blob.arrayBuffer() promise instead of the original
 * FileReader callback wiring — shorter, and read failures surface as a
 * normal promise rejection instead of a raw ProgressEvent.
 */
export const fileToUint8Array = async (
  file: File | Blob
): Promise<Uint8Array> => {
  return new Uint8Array(await file.arrayBuffer())
}

View File

@@ -0,0 +1,21 @@
import { useFormPayload } from '~/composables/useFormPayload'
/**
 * Thin wrapper around $fetch for the action-based backend API: every call
 * goes to "/" with the action name in the `s` query param and the payload
 * serialized as multipart form data.
 *
 * @param action  - backend service identifier, e.g. "App.User_User.Profile".
 * @param payload - request fields, serialized via useFormPayload.
 * @param options - method/headers/baseURL overrides
 *                  (defaults: POST, public API base, no extra headers).
 */
export const useFetchWrapped = <TypeReq, TypeResp>(
  action: string,
  payload?: TypeReq,
  options?: {
    method?: 'GET' | 'POST'
    headers?: Record<string, string>
    baseURL?: string
  }
) => {
  const runtimeConfig = useRuntimeConfig()
  return $fetch<TypeResp>('/', {
    baseURL: options?.baseURL || runtimeConfig.public.API_BASE,
    method: options?.method || 'POST',
    // Fix: the headers option was previously accepted but never forwarded.
    headers: options?.headers,
    query: {
      s: action,
    },
    body: useFormPayload(payload as object),
  })
}

View File

@@ -0,0 +1,50 @@
import { useFetchWrapped } from './useFetchWrapped'
import { useLoginState } from './useLoginState'
/**
 * PUT a file to OSS via a pre-signed URL.
 *
 * The Content-Type header is blanked deliberately — presumably to match the
 * signature computed by the backend; TODO confirm against the signer.
 *
 * Resolves on HTTP 200. Fix: rejects with an Error carrying the status
 * (the original rejected with undefined on non-200 and with a raw
 * ProgressEvent on network error, making failures hard to diagnose).
 */
const putOSS = (file: File, signed_url: string) =>
  new Promise<void>((resolve, reject) => {
    const xhr = new XMLHttpRequest()
    xhr.open('PUT', signed_url)
    xhr.setRequestHeader('Content-Type', '')
    xhr.onload = () => {
      if (xhr.status === 200) {
        resolve()
      } else {
        reject(new Error(`OSS upload failed with status ${xhr.status}`))
      }
    }
    xhr.onerror = () => reject(new Error('OSS upload failed: network error'))
    xhr.send(file)
  })
/**
 * Upload a file to OSS through the backend's DirectUpload flow and resolve
 * with the file's public URL (the signed URL stripped of its query string).
 *
 * Fix: when the backend returned an empty signed URL the original promise
 * never settled (neither resolve nor reject); it now rejects so callers
 * can surface the failure.
 */
export const useFileGo = (
  file: File,
  type: 'material' | 'ppt' | 'subtitles' | 'preview' | 'tmp'
) =>
  new Promise<string>((resolve, reject) => {
    useFetchWrapped<req.file.Upload & AuthedRequest, BaseResponse<string>>(
      'App.Tools_Upload.DirectUpload',
      {
        token: useLoginState().token!,
        user_id: useLoginState().user.id,
        file_name: file.name,
        file_type: type,
        file_size: file.size,
        file_mime: file.type,
      }
    )
      .then(async (res) => {
        if (res.data === '') {
          // Backend issued no signed URL — fail instead of hanging forever.
          reject(new Error('DirectUpload returned an empty signed URL'))
          return
        }
        try {
          await putOSS(file, res.data)
          // Strip the signature query string to get the permanent URL.
          resolve(res.data.split('?')[0])
        } catch (err) {
          reject(err)
        }
      })
      .catch((err) => {
        reject(err)
      })
  })

View File

@@ -0,0 +1,10 @@
/**
 * Serialize a plain object into FormData, one field per own enumerable key.
 *
 * Values are handed straight to FormData.append, which stringifies anything
 * that is not a Blob/File.
 */
export const useFormPayload = (payload: object) => {
  const formData = new FormData()
  // Object.entries visits only own enumerable keys, so the for..in +
  // hasOwnProperty guard (and its @ts-ignore) are no longer needed.
  for (const [key, value] of Object.entries(payload)) {
    formData.append(key, value)
  }
  return formData
}

View File

@@ -0,0 +1,32 @@
import type { ResultBlockMeta } from '~/components/aigc/drawing'
import type { ChatSession } from '~/typings/llm'
/** A single text-to-image generation record kept in local history. */
export interface HistoryItem {
  fid: string
  data_id?: string
  prompt: string
  meta: ResultBlockMeta
  images?: string[]
}
/**
 * Persisted store for AIGC history: text-to-image results and LLM chat
 * sessions, kept in localStorage across reloads.
 */
export const useHistory = defineStore(
  'xsh_assistant_aigc_history',
  () => {
    // Text-to-image generation history.
    const text2img = ref<HistoryItem[]>([])
    // Saved LLM chat sessions.
    const chatSessions = ref<ChatSession[]>([])
    /** Replace the whole session list at once. */
    const setChatSessions = (sessions: ChatSession[]) => {
      chatSessions.value = sessions
    }
    return { text2img, chatSessions, setChatSessions }
  },
  {
    persist: {
      storage: piniaPluginPersistedstate.localStorage(),
    },
  }
)

50
app/composables/useLLM.ts Normal file
View File

@@ -0,0 +1,50 @@
import {
type ChatMessage,
llmModels,
type LLMSpark,
type MessageRole,
type ModelTag,
} from '~/typings/llm'
import { useFetchWrapped } from '~/composables/useFetchWrapped'
/** Options for a single LLM completion request. */
export interface LLMRequestOptions {
  modelTag: ModelTag
}
/**
 * Send the chat context to the backend LLM endpoint selected by
 * `options.modelTag` and resolve with the model's reply text.
 *
 * Interrupted or empty messages are filtered out before sending. Rejects
 * with a string describing the failure (kept as strings for existing
 * callers that render the rejection value directly).
 */
export const useLLM = (
  context: ChatMessage[],
  options: LLMRequestOptions
): Promise<string> =>
  new Promise((resolve, reject) => {
    const { modelTag } = options
    const model = llmModels.find((model) => model.tag === modelTag)
    if (!model) return reject('model specified is not available')
    const loginState = useLoginState()
    useFetchWrapped<
      // Fix: intersection, not union — the payload must satisfy both the
      // model's request shape AND the auth fields, matching every other
      // call site in this codebase (e.g. useFileGo, fetchCourseSubtitleUrl).
      LLMSpark.request & AuthedRequest,
      BaseResponse<LLMSpark.response>
    >(model.endpoint, {
      token: loginState.token || '',
      user_id: loginState.user.id,
      prompt: JSON.stringify(
        context
          .filter((c) => c.content && !c.interrupted)
          .map((c) => ({
            role: c.role,
            content: c.content,
          }))
      ),
    })
      .then((res) => {
        if (res.ret !== 200) return reject(res.msg || 'unknown error')
        if (res.data.request_msg) return resolve(res.data.request_msg)
        if (res.data.request_fail)
          return reject(
            res.data.request_fail?.header?.message || 'unknown error'
          )
        return reject('unknown error')
      })
      .catch((err) => {
        reject(err)
      })
  })

View File

@@ -0,0 +1,79 @@
import { useFetchWrapped } from '~/composables/useFetchWrapped'
/**
 * Persisted authentication store: token, user profile, and session helpers.
 * State survives reloads via localStorage (key: xsh_assistant_persisted_state).
 */
export const useLoginState = defineStore(
  'loginState',
  () => {
    // Whether the last session check considered the user logged in.
    const is_logged_in = ref(false)
    // Backend auth token; null when logged out.
    const token = ref<string | null>(null)
    // Current user's profile; cast empty object while logged out.
    const user = ref<UserSchema>({} as UserSchema)
    // Ask the backend whether the stored token still maps to a live session.
    // Never rejects: any transport or API failure resolves to false.
    const checkSession = () => {
      return new Promise<boolean>((resolve) => {
        if (!token.value) return resolve(false)
        useFetchWrapped<AuthedRequest, BaseResponse<resp.user.CheckSession>>(
          'App.User_User.CheckSession',
          {
            token: token.value,
            user_id: user.value.id,
          }
        )
          .then((res) => {
            if (res.ret !== 200) {
              resolve(false)
              return
            }
            resolve(res.data.is_login)
            // update global state
            is_logged_in.value = res.data.is_login
          })
          .catch((err) => resolve(false))
      })
    }
    // Refresh the cached profile from the backend. Rejects with a message
    // string when the token is empty or the API reports an error.
    const updateProfile = () => {
      return new Promise<UserSchema>((resolve, reject) => {
        if (!token.value) return reject('token is empty')
        useFetchWrapped<AuthedRequest, BaseResponse<resp.user.Profile>>(
          'App.User_User.Profile',
          {
            token: token.value,
            user_id: user.value.id,
          }
        )
          .then((res) => {
            if (res.ret !== 200) {
              reject(res.msg || '未知错误')
              return
            }
            user.value = res.data.profile
            resolve(res.data.profile)
          })
          .catch((err) => reject(err || '未知错误'))
      })
    }
    // Local-only logout: clears state without calling the backend.
    const logout = () =>
      new Promise<void>((resolve) => {
        token.value = null
        user.value = {} as UserSchema
        is_logged_in.value = false
        resolve()
      })
    return {
      is_logged_in,
      token,
      user,
      checkSession,
      updateProfile,
      logout,
    }
  },
  {
    persist: {
      key: 'xsh_assistant_persisted_state',
      storage: piniaPluginPersistedstate.localStorage(),
      paths: ['is_logged_in', 'token', 'user'],
    },
  }
)

View File

@@ -0,0 +1,39 @@
/**
 * Persisted store tracking which guided tours the user has completed,
 * keyed by tour id in localStorage.
 */
export const useTourState = defineStore(
  'tour_state',
  () => {
    // tourId -> true once that tour has been completed.
    const tourState = ref<{ [key: string]: boolean }>({})

    /** Has the given tour already been completed? */
    const isTourDone = (tourId: string) => tourState.value[tourId] || false

    /** Mark a tour as done (replaces the map rather than mutating it). */
    const setTourDone = (tourId: string) => {
      tourState.value = { ...tourState.value, [tourId]: true }
    }

    /**
     * Start the given tour driver unless this tour was completed before,
     * recording completion when the driver is destroyed.
     */
    const autoDriveTour = (
      tourId: string,
      driver: ReturnType<typeof useDriver>
    ) => {
      if (isTourDone(tourId)) return
      driver.setConfig({
        ...driver.getConfig(),
        onDestroyed: () => setTourDone(tourId),
      })
      driver.drive()
    }

    return { tourState, isTourDone, setTourDone, autoDriveTour }
  },
  {
    persist: {
      key: 'xsh_assistant_tour_state',
      storage: piniaPluginPersistedstate.localStorage(),
      paths: ['tourState'],
    },
  }
)

View File

@@ -0,0 +1,6 @@
/**
 * @deprecated Use `useVideoBackgroundCompositing` instead.
 * This re-export is kept only for backward compatibility with existing
 * imports of the old `useVideoBackgroundCombinator` name.
 */
export { useVideoBackgroundCompositing as useVideoBackgroundCombinator } from './useVideoBackgroundCompositing'

View File

@@ -0,0 +1,165 @@
import { fetchFile } from '@ffmpeg/util'
import { useFFmpeg, fileToUint8Array } from './useFFmpeg'
/**
* 获取图片的宽高信息
*/
/**
 * Decode raw image bytes in the browser and report their intrinsic size.
 * The temporary object URL is revoked on both the success and failure paths.
 */
const getImageDimensions = async (
  imageData: Uint8Array
): Promise<{ width: number; height: number }> => {
  return new Promise((resolve, reject) => {
    const objectUrl = URL.createObjectURL(
      new Blob([imageData], { type: 'image/png' })
    )
    const img = new Image()
    img.onload = () => {
      URL.revokeObjectURL(objectUrl)
      resolve({ width: img.width, height: img.height })
    }
    img.onerror = () => {
      URL.revokeObjectURL(objectUrl)
      reject(new Error('Failed to load image'))
    }
    img.src = objectUrl
  })
}
/**
* 计算等比缩放到720P的尺寸
* 720P 指高度为720宽度按原宽高比计算
*/
/**
 * Scale (width, height) proportionally down to 720p height.
 *
 * Heights at or below 720 are returned untouched; otherwise the height
 * becomes exactly 720 and the width is scaled by the same ratio, then
 * snapped down to an even number as required by video encoders.
 */
const calculateScaledDimensions = (
  width: number,
  height: number
): { width: number; height: number } => {
  const targetHeight = 720
  if (height <= targetHeight) {
    return { width, height }
  }
  const scale = targetHeight / height
  const scaledWidth = Math.round(width * scale)
  // Encoders reject odd frame widths — round down to even.
  return {
    width: scaledWidth % 2 === 0 ? scaledWidth : scaledWidth - 1,
    height: targetHeight,
  }
}
// Phases reported to the progress callback, in chronological order.
export type CompositingPhase =
  | 'loading'
  | 'analyzing'
  | 'preparing'
  | 'executing'
  | 'finalizing'
// Receives overall progress (0-100) plus the current phase.
export type CompositingProgressCallback = (info: {
  progress: number
  phase: CompositingPhase
}) => void
/**
* 使用 FFmpeg WASM 将透明通道的视频与背景图片进行合成
* @param videoUrl - WebM 视频 URL带透明通道的数字人视频
* @param backgroundImage - 背景图片File 对象或 URL 字符串)
* @param options - 额外选项
* @returns 合成后的视频 Blob
*/
/**
 * 使用 FFmpeg WASM 将透明通道的视频与背景图片进行合成
 * @param videoUrl - WebM 视频 URL带透明通道的数字人视频
 * @param backgroundImage - 背景图片File 对象或 URL 字符串)
 * @param options - 额外选项
 * @returns 合成后的视频 Blob
 *
 * Fixes over the original:
 *  - the 'progress' handler leaked when exec threw (off() was only called
 *    on the success path); it is now removed in finally.
 *  - finally awaited deleteFile on files that may never have been written
 *    (e.g. fetch failed, or no output on exec error), so the deletion's
 *    rejection masked the real error; cleanup is now best-effort.
 */
export const useVideoBackgroundCompositing = async (
  videoUrl: string,
  backgroundImage: File | string,
  options?: {
    onProgress?: CompositingProgressCallback
  }
) => {
  const ffmpeg = await useFFmpeg()
  const progressCallback = options?.onProgress
  const videoFileName = 'input_video.webm'
  const backgroundFileName = 'background.png'
  const outputFileName = 'output.mp4'
  // Maps FFmpeg progress (0-1) into the 30-95% range of our overall bar.
  const executingProgressHandler = ({ progress }: { progress: number }) => {
    const mappedProgress = Math.round(30 + progress * 65)
    progressCallback?.({ progress: mappedProgress, phase: 'executing' })
  }
  try {
    progressCallback?.({ progress: 10, phase: 'loading' })
    const videoData = await fetchFile(videoUrl)
    const backgroundData = await fetchFile(backgroundImage)
    progressCallback?.({ progress: 15, phase: 'analyzing' })
    const { width: bgWidth, height: bgHeight } =
      await getImageDimensions(backgroundData)
    console.log(
      `[Compositing] Background image dimensions: ${bgWidth}x${bgHeight}`
    )
    // NOTE(review): these scaled dimensions are computed and logged but the
    // exec filter below never uses them — confirm whether a scale filter
    // was intended here.
    const { width: outputWidth, height: outputHeight } =
      calculateScaledDimensions(bgWidth, bgHeight)
    console.log(
      `[Compositing] Output dimensions: ${outputWidth}x${outputHeight}`
    )
    progressCallback?.({ progress: 20, phase: 'preparing' })
    await ffmpeg.writeFile(videoFileName, videoData)
    await ffmpeg.writeFile(backgroundFileName, backgroundData)
    progressCallback?.({ progress: 25, phase: 'preparing' })
    // HACK: 不明原因导致首次执行合成时会报 memory access out of bounds 错误,先执行一次空命令能够规避
    await ffmpeg.exec(['-i', 'not-found'])
    ffmpeg.on('progress', executingProgressHandler)
    progressCallback?.({ progress: 30, phase: 'executing' })
    // prettier-ignore
    const exitCode = await ffmpeg.exec([
      '-i', backgroundFileName,
      '-c:v', 'libvpx-vp9',
      '-i', videoFileName,
      '-filter_complex', 'overlay=(W-w)/2:H-h',
      '-c:v', 'libx264',
      outputFileName
    ])
    if (exitCode !== 0) {
      throw new Error(`FFmpeg command failed with exit code ${exitCode}`)
    }
    progressCallback?.({ progress: 95, phase: 'finalizing' })
    const outputData = await ffmpeg.readFile(outputFileName)
    // readFile may return Uint8Array or string depending on encoding.
    let outputArray: Uint8Array
    if (outputData instanceof Uint8Array) {
      outputArray = outputData
    } else if (typeof outputData === 'string') {
      outputArray = new TextEncoder().encode(outputData)
    } else {
      outputArray = new Uint8Array(outputData as ArrayBufferLike)
    }
    const outputBlob = new Blob([outputArray], { type: 'video/mp4' })
    progressCallback?.({ progress: 100, phase: 'finalizing' })
    return outputBlob
  } catch (error) {
    console.error('Video compositing failed:', error)
    throw error
  } finally {
    // Always detach the progress listener, even when exec throws.
    ffmpeg.off('progress', executingProgressHandler)
    // Best-effort cleanup: a file may never have been written, and a
    // rejection here must not mask the original error.
    for (const name of [videoFileName, backgroundFileName, outputFileName]) {
      try {
        await ffmpeg.deleteFile(name)
      } catch {
        // ignore — nothing to delete
      }
    }
  }
}

View File

@@ -0,0 +1,81 @@
import {
Combinator,
EmbedSubtitlesClip,
MP4Clip,
OffscreenSprite,
} from '@webav/av-cliper'
/**
 * Styling and sizing options forwarded to EmbedSubtitlesClip.
 * All fields are optional; unspecified ones fall back to the defaults set
 * in useVideoSubtitleEmbedding.
 */
export interface SubtitleEmbeddingOptions {
  color?: string
  textBgColor?: string | null
  // Only SRT subtitle input is supported.
  type?: 'srt'
  fontFamily?: string
  fontSize?: number
  letterSpacing?: string | null
  bottomOffset?: number
  strokeStyle?: string
  lineWidth?: number | null
  lineCap?: CanvasLineCap | null
  lineJoin?: CanvasLineJoin | null
  // Drop shadow applied to the subtitle text.
  textShadow?: {
    offsetX: number
    offsetY: number
    blur: number
    color: string
  }
  // Canvas dimensions the subtitles are rendered against.
  videoWidth?: number
  videoHeight?: number
}
/**
 * Burn an SRT subtitle track into a video client-side using WebAV.
 *
 * Fix: the output Combinator previously hardcoded 1920x1080 and ignored
 * options.videoWidth / options.videoHeight; the size is now derived once
 * and used for both the subtitle clip and the output canvas. (The old
 * `if (!options)` default block was dead code — it assigned the same
 * values the literals below already provided — and has been removed.)
 *
 * @param videoUrl - MP4 video source URL.
 * @param srtUrl   - SRT subtitle file URL.
 * @param options  - subtitle styling / canvas size overrides.
 * @returns an object URL for the rendered MP4 blob.
 */
export const useVideoSubtitleEmbedding = async (
  videoUrl: string,
  srtUrl: string,
  options?: SubtitleEmbeddingOptions
) => {
  const videoWidth = options?.videoWidth ?? 1920
  const videoHeight = options?.videoHeight ?? 1080
  console.log(`video clip: ${videoUrl}`)
  const videoClip = new MP4Clip((await fetch(videoUrl)).body!)
  const videoSprite = new OffscreenSprite(videoClip)
  videoSprite.time = { duration: videoClip.meta.duration, offset: 0 }
  await videoSprite.ready
  const srtSprite = new OffscreenSprite(
    new EmbedSubtitlesClip(await (await fetch(srtUrl)).text(), {
      videoWidth,
      videoHeight,
      fontSize: 36,
      fontFamily: 'Noto Sans SC',
      strokeStyle: 'none',
      textShadow: {
        offsetX: 2,
        offsetY: 2,
        blur: 6,
        color: 'rgba(0, 0, 0, 0.35)',
      },
      // Caller overrides win over the defaults above.
      ...options,
    })
  )
  await srtSprite.ready
  srtSprite.time = { duration: videoClip.meta.duration, offset: 0 }
  const combinator = new Combinator({
    width: videoWidth,
    height: videoHeight,
  })
  await combinator.addSprite(videoSprite)
  await combinator.addSprite(srtSprite)
  const srcBlob = URL.createObjectURL(
    await new Response(combinator.output()).blob()
  )
  return srcBlob
}