- Conversation model: store dialog context (max 10 rounds), JSON messages - POST /api/prompt/continue: append round, build LLM context from history - GET/DELETE /api/conversation/:id retrieve or clear conversation - Vue: refine input card below result, round counter, reset button - Vue: continuePrompt API with conversation_id tracking Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
38 lines
1.1 KiB
TypeScript
import client from '../client'
|
|
import type {
|
|
GenerateMetaResponse,
|
|
GeneratePromptResponse,
|
|
TemplatesByCategoryResponse,
|
|
} from '../types/template'
|
|
|
|
export function fetchGenerateMeta() {
|
|
return client.get<GenerateMetaResponse>('/api/generate/meta').then((r) => r.data)
|
|
}
|
|
|
|
export function fetchTemplatesByCategory(category: string) {
|
|
const path = '/api/templates/' + encodeURIComponent(category)
|
|
return client.get<TemplatesByCategoryResponse>(path).then((r) => r.data)
|
|
}
|
|
|
|
export function generatePrompt(body: { input_text: string; template_id: number | null; max_tokens?: number }) {
|
|
return client.post<GeneratePromptResponse>('/api/prompt/generate', body).then((r) => r.data)
|
|
}
|
|
|
|
export interface ContinuePromptResponse {
|
|
success: boolean
|
|
message?: string
|
|
conversation_id: number
|
|
generated_text: string
|
|
rounds: number
|
|
prompt?: { id: number; input_text: string; generated_text: string }
|
|
}
|
|
|
|
export function continuePrompt(body: {
|
|
conversation_id?: number | null
|
|
previous_result: string
|
|
refine_instruction: string
|
|
template_id?: number | null
|
|
}) {
|
|
return client.post<ContinuePromptResponse>('/api/prompt/continue', body).then((r) => r.data)
|
|
}
|