5e4dc8e4
tangwang
翻译架构按“一个翻译服务 +
|
1
2
3
4
5
6
7
8
9
10
11
12
|
"""LLM-based translation backend."""
from __future__ import annotations
import logging
import os
import time
from typing import List, Optional, Sequence, Union
from openai import OpenAI
from config.env_config import DASHSCOPE_API_KEY
|
0fd2f875
tangwang
translate
|
13
14
15
|
from translation.languages import LANGUAGE_LABELS
from translation.prompts import TRANSLATION_PROMPTS
from translation.scenes import normalize_scene_name
|
5e4dc8e4
tangwang
翻译架构按“一个翻译服务 +
|
16
17
18
|
logger = logging.getLogger(__name__)
|
5e4dc8e4
tangwang
翻译架构按“一个翻译服务 +
|
19
20
21
22
23
24
25
26
|
def _build_prompt(
    text: str,
    *,
    source_lang: Optional[str],
    target_lang: str,
    scene: Optional[str],
) -> str:
    """Render the scene-specific LLM prompt for one translation request.

    Args:
        text: The source text to embed into the prompt template.
        source_lang: Source language code; ``None``/blank means ``"auto"``.
        target_lang: Target language code (normalized to lowercase).
        scene: Scene name; normalized via ``normalize_scene_name``.

    Returns:
        The fully formatted prompt string.

    Raises:
        ValueError: If no template exists for the scene/target combination.
    """
    target_code = str(target_lang or "").strip().lower()
    source_code = str(source_lang or "auto").strip().lower() or "auto"
    scene_key = normalize_scene_name(scene)
    prompt_group = TRANSLATION_PROMPTS[scene_key]

    # Prefer a target-language-specific template; fall back to the English one.
    template = prompt_group.get(target_code) or prompt_group.get("en")
    if template is None:
        raise ValueError(
            f"Missing llm translation prompt for scene='{scene_key}' target_lang='{target_code}'"
        )

    # Human-readable labels where known; otherwise echo the raw codes.
    return template.format(
        source_lang=LANGUAGE_LABELS.get(source_code, source_code),
        src_lang_code=source_code,
        target_lang=LANGUAGE_LABELS.get(target_code, target_code),
        tgt_lang_code=target_code,
        text=text,
    )
class LLMTranslationBackend:
    """Translation backend delegating to an OpenAI-compatible chat LLM.

    The client is created eagerly at construction time. When no API key is
    configured or client creation fails, the backend degrades gracefully:
    translation calls simply return ``None``.
    """

    def __init__(
        self,
        *,
        capability_name: str,
        model: str,
        timeout_sec: float,
        base_url: str,
    ) -> None:
        """Store configuration and eagerly build the chat-completions client.

        Args:
            capability_name: Identifier for this backend; not read inside
                this class (presumably consumed by the surrounding service).
            model: Model name passed to the chat-completions endpoint.
            timeout_sec: Per-request timeout, coerced to ``float``.
            base_url: Endpoint base URL for the OpenAI-compatible API.
        """
        self.capability_name = capability_name
        self.model = model
        self.timeout_sec = float(timeout_sec)
        self.base_url = base_url
        # May be None when no API key is available; checked on every call.
        self.client = self._create_client()

    @property
    def supports_batch(self) -> bool:
        """Whether ``translate`` accepts a sequence of texts (always true)."""
        return True

    def _create_client(self) -> Optional[OpenAI]:
        """Build the OpenAI-compatible client, or ``None`` when unavailable."""
        key = DASHSCOPE_API_KEY or os.getenv("DASHSCOPE_API_KEY")
        if not key:
            logger.warning("DASHSCOPE_API_KEY not set; llm translation unavailable")
            return None
        try:
            return OpenAI(api_key=key, base_url=self.base_url)
        except Exception as exc:
            logger.error("Failed to initialize llm translation client: %s", exc, exc_info=True)
            return None

    def _translate_single(
        self,
        text: str,
        target_lang: str,
        source_lang: Optional[str] = None,
        scene: Optional[str] = None,
    ) -> Optional[str]:
        """Translate one string; return ``None`` on any failure.

        Raises:
            ValueError: If ``scene`` is ``None`` (a scene is mandatory for
                non-blank input once a client exists).
        """
        # Blank input passes through untouched.
        if not text or not str(text).strip():
            return text
        # No client means translation is unavailable — best-effort None.
        if not self.client:
            return None

        tgt = str(target_lang or "").strip().lower()
        src = str(source_lang or "auto").strip().lower() or "auto"
        if scene is None:
            raise ValueError("llm translation scene is required")

        user_prompt = _build_prompt(
            text=text,
            source_lang=src,
            target_lang=tgt,
            scene=normalize_scene_name(scene),
        )
        started = time.time()
        try:
            logger.info(
                "[llm] Request | src=%s tgt=%s model=%s prompt=%s",
                src,
                tgt,
                self.model,
                user_prompt,
            )
            completion = self.client.chat.completions.create(
                model=self.model,
                messages=[{"role": "user", "content": user_prompt}],
                timeout=self.timeout_sec,
            )
            content = (completion.choices[0].message.content or "").strip()
            elapsed_ms = (time.time() - started) * 1000
            if not content:
                logger.warning("[llm] Empty result | src=%s tgt=%s latency=%.1fms", src, tgt, elapsed_ms)
                return None
            logger.info(
                "[llm] Success | src=%s tgt=%s src_text=%s response=%s latency=%.1fms",
                src,
                tgt,
                text,
                content,
                elapsed_ms,
            )
            return content
        except Exception as exc:
            # Any API/network error degrades to None rather than propagating.
            elapsed_ms = (time.time() - started) * 1000
            logger.warning(
                "[llm] Failed | src=%s tgt=%s latency=%.1fms error=%s",
                src,
                tgt,
                elapsed_ms,
                exc,
                exc_info=True,
            )
            return None

    def translate(
        self,
        text: Union[str, Sequence[str]],
        target_lang: str,
        source_lang: Optional[str] = None,
        scene: Optional[str] = None,
    ) -> Union[Optional[str], List[Optional[str]]]:
        """Translate a single string, or a list/tuple of strings item by item.

        For sequence input, ``None`` entries are preserved as ``None`` in the
        result list; each failed item translation also yields ``None``.
        """
        if not isinstance(text, (list, tuple)):
            return self._translate_single(
                text=str(text),
                target_lang=target_lang,
                source_lang=source_lang,
                scene=scene,
            )

        translated: List[Optional[str]] = []
        for entry in text:
            if entry is None:
                translated.append(None)
                continue
            translated.append(
                self._translate_single(
                    text=str(entry),
                    target_lang=target_lang,
                    source_lang=source_lang,
                    scene=scene,
                )
            )
        return translated
|