84 lines
1.6 KiB
TypeScript
84 lines
1.6 KiB
TypeScript
/** Unique identifier of a chat session (opaque string). */
export type ChatSessionId = string
/** Unique identifier of a single chat message (opaque string). */
export type ChatMessageId = string
/**
 * Discriminant tag identifying which Spark model version an entry refers to.
 * Each tag corresponds to one element of `llmModels`.
 */
export type ModelTag =
  'spark1_5' |
  'spark3_0' |
  'spark3_5'
/**
 * Metadata describing one selectable LLM backend.
 *
 * NOTE(review): the name looks like a typo for `LLMModel` ("Modal" vs
 * "Model"); renaming would break external callers, so it is left as-is.
 */
export interface LLMModal {
  /** Machine-readable model identifier (see `ModelTag`). */
  tag: ModelTag
  /** Human-readable display name (e.g. "Spark 3.5"). */
  name: string
  /** Short description shown alongside the model. */
  description: string
  /** Backend route for this model (e.g. "App.Assistant_Spark.Chat_3_5"). */
  endpoint: string
}
/**
 * Request/response shapes for the Spark chat API.
 *
 * NOTE(review): `request`/`response` are lowercase, breaking the usual
 * PascalCase interface convention — renaming would break external
 * callers, so they are left as-is.
 */
export namespace LLMSpark {
  /** Payload sent to a Spark chat endpoint. */
  export interface request {
    /** The user's prompt text. */
    prompt: string
  }
  /** Payload returned by a Spark chat endpoint. */
  export interface response {
    // Message text on success — presumably the model's reply; TODO confirm against caller.
    request_msg?: string
    // Error details, present when the request failed.
    request_fail?: {
      // Error header as returned by the Spark service — field meanings
      // assumed from names; verify against the upstream API docs.
      header?: {
        code: number
        message: string
        sid: string
        status: number
      }
    }
    // answer: string
    // last_data: {
    //   payload: {
    //     usage: {
    //       text: {
    //         completion_tokens: number
    //         prompt_tokens: number
    //         question_tokens: number
    //         total_tokens: number
    //       }
    //     }
    //   }
    // }
  }
}
export const llmModels: Readonly<LLMModal[]> = Object.freeze([
|
|
{
|
|
tag: 'spark1_5',
|
|
name: 'Spark 1.5',
|
|
description: 'Spark 1.5',
|
|
endpoint: 'App.Assistant_Spark.Chat_1_5'
|
|
},
|
|
{
|
|
tag: 'spark3_0',
|
|
name: 'Spark 3.0',
|
|
description: 'Spark 3.0',
|
|
endpoint: 'App.Assistant_Spark.Chat_3_0'
|
|
},
|
|
{
|
|
tag: 'spark3_5',
|
|
name: 'Spark 3.5',
|
|
description: 'Spark 3.5',
|
|
endpoint: 'App.Assistant_Spark.Chat_3_5'
|
|
},
|
|
])
|
|
|
|
/** A persisted chat conversation and its message history. */
export interface ChatSession {
  /** Unique session identifier. */
  id: ChatSessionId
  /** Display title of the session. */
  subject: string
  // Creation timestamp — presumably epoch milliseconds; TODO confirm
  // against the writer. NOTE(review): likely a typo for `created_at`,
  // but renaming would break external callers.
  create_at: number
  /** Messages belonging to this session. */
  messages: ChatMessage[]
  /** Draft input text not yet sent, if any. */
  last_input?: string
}
/** Author of a chat message: the end user, the model, or a system prompt. */
export type MessageRole = 'user' | 'assistant' | 'system'
/** A single message within a `ChatSession`. */
export interface ChatMessage {
  /** Unique message identifier. */
  id: ChatMessageId
  /** Who produced this message. */
  role: MessageRole
  /** Message text. */
  content: string
  // Creation timestamp — presumably epoch milliseconds; TODO confirm.
  // NOTE(review): optional here but required on ChatSession.create_at —
  // verify whether that asymmetry is intentional.
  create_at?: number
  // Presumably true when generation was cut off before completion —
  // confirm against the producer of this flag.
  interrupted?: boolean
}