This commit is contained in:
2026-04-12 18:54:59 +07:00
parent 28e866a64e
commit ec3d400e8a
71 changed files with 7888 additions and 333 deletions

View File

@@ -1,28 +1,89 @@
import { useMutation } from "@tanstack/react-query"
import { supabase } from "@/lib/supabase"
import { canUseWritingCheck, recordWritingCheckUsage } from "@/utils/rate-limiter"
import { useAuthStore } from "@/store/auth-store"
import { saveWritingSubmission, countTodayWritingSubmissions } from "@/lib/progress-service"
import type { WritingFeedback } from "@/types"
interface WritingFeedback {
score: string
grammar: string[]
vocabulary: string[]
structure: string
improved_version: string
summary: string
// Daily writing-check quotas: authenticated users (enforced server-side via
// countTodayWritingSubmissions) vs. guests (enforced via localStorage).
const AUTH_DAILY_LIMIT = 10
const GUEST_DAILY_LIMIT = 3
// Zhipu GLM OpenAI-compatible chat-completions endpoint.
const GLM_BASE_URL = "https://open.bigmodel.cn/api/paas/v4"
// NOTE(review): `as string` hides a missing VITE_GLM_API_KEY at build time;
// requests would then fail at runtime with an auth error — consider a
// startup assertion. Model falls back to a default when unset.
const GLM_API_KEY = import.meta.env.VITE_GLM_API_KEY as string
const GLM_MODEL = (import.meta.env.VITE_GLM_MODEL as string) || "GLM-4-32B-0414-128K"
// Keep system prompt concise — fewer tokens = more room for output.
// NOTE(review): an earlier comment said improved_version was omitted from the
// schema to save tokens, but the prompt below does include it — the comment
// was stale; the field is part of the expected JSON reply.
const SYSTEM_PROMPT = `You are an expert English writing teacher for TOEIC and IELTS.
Respond ONLY with valid JSON, no markdown:
{"score":"6.5","grammar":["issue + fix in Vietnamese"],"vocabulary":["observation in Vietnamese"],"structure":"2 sentences in Vietnamese","improved_version":"full improved text","summary":"2 sentences in Vietnamese"}`
/**
 * Sends the user's writing sample to the GLM chat-completions endpoint and
 * parses the JSON feedback object the system prompt instructs the model to
 * return.
 *
 * @param content - Text to analyse; truncated to 1500 characters to leave
 *                  room in the context window for the model's reply.
 * @returns The parsed {@link WritingFeedback}.
 * @throws Error with the API's own message (or a generic status message) on
 *         a non-2xx response, or a Vietnamese-language error when the model
 *         reply is missing or not a valid JSON object.
 */
async function callGlm(content: string): Promise<WritingFeedback> {
  const res = await fetch(`${GLM_BASE_URL}/chat/completions`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${GLM_API_KEY}`,
    },
    body: JSON.stringify({
      model: GLM_MODEL,
      messages: [
        { role: "system", content: SYSTEM_PROMPT },
        { role: "user", content: `Analyse:\n\n${content.slice(0, 1500)}` },
      ],
      temperature: 0.3,
      max_tokens: 2500,
      // Force JSON output mode (OpenAI-compatible, supported by GLM)
      response_format: { type: "json_object" },
    }),
  })
  if (!res.ok) {
    // The error body may itself be non-JSON; fall back to the HTTP status.
    const err = await res.json().catch(() => ({}))
    throw new Error((err as { error?: { message?: string } }).error?.message ?? `GLM error ${res.status}`)
  }
  const data = (await res.json()) as { choices?: { message?: { content?: string } }[] }
  const raw = data.choices?.[0]?.message?.content
  // Previously a missing reply fell through as "{}" and was cast to
  // WritingFeedback, silently returning an empty feedback object. Fail
  // loudly instead so the UI can ask the user to retry.
  if (!raw) {
    throw new Error("Phản hồi từ AI không hợp lệ. Vui lòng thử lại.")
  }
  // Strip markdown code fences defensively — some models wrap JSON anyway.
  const cleaned = raw.replace(/^```(?:json)?\s*/i, "").replace(/\s*```$/, "").trim()
  try {
    const parsed: unknown = JSON.parse(cleaned)
    // Reject valid-JSON-but-not-an-object replies (e.g. a bare string).
    if (typeof parsed !== "object" || parsed === null) {
      throw new Error("not an object")
    }
    return parsed as WritingFeedback
  } catch {
    throw new Error("Phản hồi từ AI không hợp lệ. Vui lòng thử lại.")
  }
}
export function useWritingCheck() {
return useMutation({
mutationFn: async (content: string): Promise<WritingFeedback> => {
if (!canUseWritingCheck()) {
throw new Error("Bạn đã dùng hết 3 lần kiểm tra hôm nay. Quay lại vào ngày mai!")
const user = useAuthStore.getState().user
if (user) {
// Server-side rate limit for authenticated users (10/day)
const usedToday = await countTodayWritingSubmissions(user.id)
if (usedToday >= AUTH_DAILY_LIMIT) {
throw new Error(`Bạn đã dùng hết ${AUTH_DAILY_LIMIT} lần kiểm tra hôm nay. Quay lại vào ngày mai!`)
}
} else {
// localStorage rate limit for guests (3/day)
if (!canUseWritingCheck()) {
throw new Error(`Bạn đã dùng hết ${GUEST_DAILY_LIMIT} lần kiểm tra hôm nay. Đăng ký để được 10 lần/ngày!`)
}
}
const { data, error } = await supabase.functions.invoke("writing-check", {
body: { content },
})
if (error) throw error
recordWritingCheckUsage()
return data as WritingFeedback
const feedback = await callGlm(content)
if (user) {
// Save to DB (fire-and-forget)
saveWritingSubmission(user.id, content, feedback)
} else {
// Persist guest usage in localStorage
recordWritingCheckUsage()
}
return feedback
},
})
}