# test_translation_llm_backend.py
from types import SimpleNamespace

from translation.backends.llm import LLMTranslationBackend


class _FakeCompletions:
    def __init__(self, responses):
        self.responses = list(responses)
        self.calls = []

    def create(self, *, model, messages, timeout):
        self.calls.append(
            {
                "model": model,
                "messages": messages,
                "timeout": timeout,
            }
        )
        content = self.responses.pop(0)
        return SimpleNamespace(
            choices=[
                SimpleNamespace(
                    message=SimpleNamespace(content=content),
                )
            ]
        )


def _build_backend(monkeypatch, responses):
    """Build an ``LLMTranslationBackend`` wired to a fake completions client.

    Patches ``_create_client`` on the class so constructing the backend never
    touches the network. Returns the backend together with the fake
    completions object for inspecting recorded calls.
    """
    completions = _FakeCompletions(responses)
    client = SimpleNamespace(chat=SimpleNamespace(completions=completions))
    monkeypatch.setattr(
        LLMTranslationBackend, "_create_client", lambda self: client
    )
    backend = LLMTranslationBackend(
        capability_name="llm",
        model="test-model",
        timeout_sec=5.0,
        base_url="https://example.com",
        api_key="test-key",
    )
    return backend, completions


def test_llm_translate_batch_uses_single_request(monkeypatch):
    """A multi-item batch is translated with exactly one API call."""
    backend, completions = _build_backend(monkeypatch, ["1. Dress\n2. Shirt"])

    translated = backend.translate(
        ["连衣裙", "衬衫"],
        target_lang="en",
        source_lang="zh",
        scene="sku_name",
    )

    assert translated == ["Dress", "Shirt"]
    assert len(completions.calls) == 1
    # The single prompt must state the numbered-line output contract and
    # enumerate the inputs in order.
    sent_prompt = completions.calls[0]["messages"][0]["content"]
    expected_fragments = (
        "Output exactly one line for each input item, in the same order, using this exact format:",
        "1. translation\n2. translation",
        "1. 连衣裙\n2. 衬衫",
    )
    for fragment in expected_fragments:
        assert fragment in sent_prompt


def test_llm_translate_batch_falls_back_to_single_on_invalid_output(monkeypatch):
    """Malformed batch output triggers one per-item fallback request each."""
    # First canned reply is missing the "1." prefix, so the batch parse
    # fails; the remaining replies serve the per-item fallback calls.
    backend, completions = _build_backend(
        monkeypatch, ["Dress\n2. Shirt", "Dress", "Shirt"]
    )

    translated = backend.translate(
        ["连衣裙", "衬衫"],
        target_lang="en",
        source_lang="zh",
        scene="sku_name",
    )

    assert translated == ["Dress", "Shirt"]
    # One failed batch call plus one fallback call per input item.
    assert len(completions.calls) == 3


def test_llm_translate_batch_preserves_empty_items(monkeypatch):
    """None/blank entries pass through unchanged and are not sent to the LLM."""
    backend, completions = _build_backend(monkeypatch, ["1. Product"])

    translated = backend.translate(
        [None, " ", "商品"],
        target_lang="en",
        source_lang="zh",
        scene="general",
    )

    assert translated == [None, " ", "Product"]
    assert len(completions.calls) == 1
    # Only the non-empty item appears in the prompt, renumbered from 1.
    sent_prompt = completions.calls[0]["messages"][0]["content"]
    assert "1. 商品" in sent_prompt
    assert "Input:\n1. 商品" in sent_prompt