from types import SimpleNamespace
from translation.backends.llm import LLMTranslationBackend
class _FakeCompletions:
def __init__(self, responses):
self.responses = list(responses)
self.calls = []
def create(self, *, model, messages, timeout):
self.calls.append(
{
"model": model,
"messages": messages,
"timeout": timeout,
}
)
content = self.responses.pop(0)
return SimpleNamespace(
choices=[
SimpleNamespace(
message=SimpleNamespace(content=content),
)
]
)
def _build_backend(monkeypatch, responses):
    """Build an ``LLMTranslationBackend`` wired to a fake completions client.

    Patches ``_create_client`` so no real network client is constructed,
    and returns both the backend and the fake completions object for
    call inspection.
    """
    completions_stub = _FakeCompletions(responses)
    client_stub = SimpleNamespace(chat=SimpleNamespace(completions=completions_stub))
    monkeypatch.setattr(
        LLMTranslationBackend, "_create_client", lambda self: client_stub
    )
    backend = LLMTranslationBackend(
        capability_name="llm",
        model="test-model",
        timeout_sec=5.0,
        base_url="https://example.com",
        api_key="test-key",
    )
    return backend, completions_stub
def test_llm_translate_batch_uses_single_request(monkeypatch):
    """A multi-item batch is translated via one numbered-list request."""
    backend, completions = _build_backend(monkeypatch, ["1. Dress\n2. Shirt"])

    translated = backend.translate(
        ["连衣裙", "衬衫"],
        target_lang="en",
        source_lang="zh",
        scene="sku_name",
    )

    assert translated == ["Dress", "Shirt"]
    assert len(completions.calls) == 1

    # The single prompt must carry the format instructions and both items.
    sent_prompt = completions.calls[0]["messages"][0]["content"]
    expected_fragments = (
        "Output exactly one line for each input item, in the same order, using this exact format:",
        "1. translation\n2. translation",
        "1. 连衣裙\n2. 衬衫",
    )
    for fragment in expected_fragments:
        assert fragment in sent_prompt
def test_llm_translate_batch_falls_back_to_single_on_invalid_output(monkeypatch):
    """Malformed batch output triggers per-item retries (1 batch + 2 singles)."""
    canned_responses = ["Dress\n2. Shirt", "Dress", "Shirt"]
    backend, completions = _build_backend(monkeypatch, canned_responses)

    translated = backend.translate(
        ["连衣裙", "衬衫"],
        target_lang="en",
        source_lang="zh",
        scene="sku_name",
    )

    assert translated == ["Dress", "Shirt"]
    assert len(completions.calls) == 3
def test_llm_translate_batch_preserves_empty_items(monkeypatch):
    """None/whitespace items pass through untouched; only real text is sent."""
    backend, completions = _build_backend(monkeypatch, ["1. Product"])

    translated = backend.translate(
        [None, " ", "商品"],
        target_lang="en",
        source_lang="zh",
        scene="general",
    )

    assert translated == [None, " ", "Product"]
    assert len(completions.calls) == 1

    # Only the single non-empty item should appear in the prompt, renumbered as 1.
    sent_prompt = completions.calls[0]["messages"][0]["content"]
    assert "1. 商品" in sent_prompt
    assert "Input:\n1. 商品" in sent_prompt