Move writing check from direct client-side GLM API call to a Supabase edge function

This commit is contained in:
2026-04-12 19:07:31 +07:00
parent ec3d400e8a
commit 53afcf5eb2
2 changed files with 10 additions and 47 deletions

View File

@@ -1,58 +1,22 @@
import { useMutation } from "@tanstack/react-query"
import { canUseWritingCheck, recordWritingCheckUsage } from "@/utils/rate-limiter"
import { useAuthStore } from "@/store/auth-store"
import { supabase } from "@/lib/supabase"
import { saveWritingSubmission, countTodayWritingSubmissions } from "@/lib/progress-service"
import type { WritingFeedback } from "@/types"
const AUTH_DAILY_LIMIT = 10
const GUEST_DAILY_LIMIT = 3
const GLM_BASE_URL = "https://open.bigmodel.cn/api/paas/v4"
const GLM_API_KEY = import.meta.env.VITE_GLM_API_KEY as string
const GLM_MODEL = (import.meta.env.VITE_GLM_MODEL as string) || "GLM-4-32B-0414-128K"
// Keep system prompt concise — fewer tokens = more room for output.
// improved_version omitted from schema to reduce output length; added back as optional.
const SYSTEM_PROMPT = `You are an expert English writing teacher for TOEIC and IELTS.
Respond ONLY with valid JSON, no markdown:
{"score":"6.5","grammar":["issue + fix in Vietnamese"],"vocabulary":["observation in Vietnamese"],"structure":"2 sentences in Vietnamese","improved_version":"full improved text","summary":"2 sentences in Vietnamese"}`
async function callGlm(content: string): Promise<WritingFeedback> {
const res = await fetch(`${GLM_BASE_URL}/chat/completions`, {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${GLM_API_KEY}`,
},
body: JSON.stringify({
model: GLM_MODEL,
messages: [
{ role: "system", content: SYSTEM_PROMPT },
{ role: "user", content: `Analyse:\n\n${content.slice(0, 1500)}` },
],
temperature: 0.3,
max_tokens: 2500,
// Force JSON output mode (OpenAI-compatible, supported by GLM)
response_format: { type: "json_object" },
}),
async function callEdgeFunction(content: string): Promise<WritingFeedback> {
const { data, error } = await supabase.functions.invoke<WritingFeedback>("writing-check", {
body: { content },
})
if (!res.ok) {
const err = await res.json().catch(() => ({}))
throw new Error((err as { error?: { message?: string } }).error?.message ?? `GLM error ${res.status}`)
}
if (error) throw new Error(error.message ?? "Đã có lỗi khi chấm bài. Vui lòng thử lại.")
if (!data) throw new Error("Phản hồi từ AI không hợp lệ. Vui lòng thử lại.")
const data = await res.json() as { choices: { message: { content: string } }[] }
const raw = data.choices[0]?.message?.content ?? "{}"
// Strip markdown code fences defensively
const cleaned = raw.replace(/^```(?:json)?\s*/i, "").replace(/\s*```$/, "").trim()
try {
return JSON.parse(cleaned) as WritingFeedback
} catch {
throw new Error("Phản hồi từ AI không hợp lệ. Vui lòng thử lại.")
}
return data
}
export function useWritingCheck() {
@@ -73,13 +37,12 @@ export function useWritingCheck() {
}
}
const feedback = await callGlm(content)
const feedback = await callEdgeFunction(content)
if (user) {
// Save to DB (fire-and-forget)
// Save submission to DB (fire-and-forget)
saveWritingSubmission(user.id, content, feedback)
} else {
// Persist guest usage in localStorage
recordWritingCheckUsage()
}