diff --git a/.env b/.env index 5f4d9ca..4370753 100644 --- a/.env +++ b/.env @@ -31,3 +31,7 @@ CACHE_DIR=.cache # Frontend API Base URL API_BASE_URL=http://120.76.41.98:6002 + + +DASHSCOPE_API_KEY=your-dashscope-api-key-here +OPENAI_API_KEY=your-openai-api-key-here diff --git a/api/routes/search.py b/api/routes/search.py index 26b0730..3b18bfc 100644 --- a/api/routes/search.py +++ b/api/routes/search.py @@ -117,7 +117,7 @@ async def search(request: SearchRequest, http_request: Request): performance_summary = context.get_summary() if context else None # Convert to response model - return SearchResponse( + response = SearchResponse( results=result.results, total=result.total, max_score=result.max_score, @@ -130,6 +130,18 @@ async def search(request: SearchRequest, http_request: Request): debug_info=result.debug_info ) + # Log complete response JSON + context.logger.info( + "Search response | " + f"Total results: {response.total} | " + f"Max score: {response.max_score:.4f} | " + f"Time: {response.took_ms}ms | " + f"Response: {response.model_dump_json()}", + extra={'reqid': context.reqid, 'uid': context.uid} + ) + + return response + except Exception as e: # Log error in context if context: @@ -203,7 +215,7 @@ async def search_by_image(request: ImageSearchRequest, http_request: Request): # Include performance summary in response performance_summary = context.get_summary() if context else None - return SearchResponse( + response = SearchResponse( results=result.results, total=result.total, max_score=result.max_score, @@ -215,6 +227,18 @@ async def search_by_image(request: ImageSearchRequest, http_request: Request): performance_info=performance_summary ) + # Log complete response JSON + context.logger.info( + "Image search response | " + f"Total results: {response.total} | " + f"Max score: {response.max_score:.4f} | " + f"Time: {response.took_ms}ms | " + f"Response: {response.model_dump_json()}", + extra={'reqid': context.reqid, 'uid': 
context.uid} + ) + + return response + except ValueError as e: if context: context.set_error(e) diff --git a/api/translator_app.py b/api/translator_app.py index 7f41913..1c710de 100644 --- a/api/translator_app.py +++ b/api/translator_app.py @@ -11,7 +11,7 @@ uvicorn api.translator_app:app --host 0.0.0.0 --port 6006 --reload 使用说明: Translation HTTP Service -This service provides a RESTful API for text translation using DeepL API. +This service provides a RESTful API for text translation using Qwen (default) or DeepL API. The service runs on port 6006 and provides a simple translation endpoint. API Endpoint: @@ -21,7 +21,8 @@ Request Body (JSON): { "text": "要翻译的文本", "target_lang": "en", # Required: target language code (zh, en, ru, etc.) - "source_lang": "zh" # Optional: source language code (auto-detect if not provided) + "source_lang": "zh", # Optional: source language code (auto-detect if not provided) + "model": "qwen" # Optional: translation model ("qwen" or "deepl", default: "qwen") } Response (JSON): @@ -52,7 +53,17 @@ Usage Examples: "target_lang": "zh" }' -3. Translate Russian to English: +3. Translate using DeepL model: + curl -X POST http://localhost:6006/translate \ + -H "Content-Type: application/json" \ + -d '{ + "text": "商品名称", + "target_lang": "en", + "source_lang": "zh", + "model": "deepl" + }' + +4. 
Translate Russian to English: curl -X POST http://localhost:6006/translate \ -H "Content-Type: application/json" \ -d '{ @@ -77,7 +88,7 @@ import sys import logging import argparse import uvicorn -from typing import Optional +from typing import Optional, Dict from fastapi import FastAPI, HTTPException from fastapi.responses import JSONResponse from fastapi.middleware.cors import CORSMiddleware @@ -87,7 +98,7 @@ from pydantic import BaseModel, Field sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from query.translator import Translator -from config.env_config import DEEPL_AUTH_KEY, REDIS_CONFIG +from config.env_config import DEEPL_AUTH_KEY, DASHSCOPE_API_KEY, REDIS_CONFIG # Configure logging logging.basicConfig( @@ -99,22 +110,22 @@ logger = logging.getLogger(__name__) # Fixed translation prompt TRANSLATION_PROMPT = "Translate the original text into an English product SKU name. Requirements: Ensure accurate and complete transmission of the original information, with concise, clear, authentic, and professional language." 
-# Global translator instance -_translator: Optional[Translator] = None +# Global translator instances cache (keyed by model) +_translators: Dict[str, Translator] = {} -def init_translator(): - """Initialize translator instance.""" - global _translator - if _translator is None: - logger.info("Initializing translator...") - _translator = Translator( - api_key=DEEPL_AUTH_KEY, +def get_translator(model: str = "qwen") -> Translator: + """Get or create translator instance for the specified model.""" + global _translators + if model not in _translators: + logger.info(f"Initializing translator with model: {model}...") + _translators[model] = Translator( + model=model, use_cache=True, timeout=10 ) - logger.info("Translator initialized") - return _translator + logger.info(f"Translator initialized with model: {model}") + return _translators[model] # Request/Response models @@ -123,13 +134,15 @@ class TranslationRequest(BaseModel): text: str = Field(..., description="Text to translate") target_lang: str = Field(..., description="Target language code (zh, en, ru, etc.)") source_lang: Optional[str] = Field(None, description="Source language code (optional, auto-detect if not provided)") + model: Optional[str] = Field("qwen", description="Translation model: 'qwen' (default) or 'deepl'") class Config: json_schema_extra = { "example": { "text": "商品名称", "target_lang": "en", - "source_lang": "zh" + "source_lang": "zh", + "model": "qwen" } } @@ -141,12 +154,13 @@ class TranslationResponse(BaseModel): source_lang: Optional[str] = Field(None, description="Source language code (detected or provided)") translated_text: str = Field(..., description="Translated text") status: str = Field(..., description="Translation status") + model: str = Field(..., description="Translation model used") # Create FastAPI app app = FastAPI( title="Translation Service API", - description="RESTful API for text translation using DeepL", + description="RESTful API for text translation using Qwen (default) or 
DeepL", version="1.0.0", docs_url="/docs", redoc_url="/redoc" @@ -166,9 +180,11 @@ app.add_middleware( async def startup_event(): """Initialize translator on startup.""" logger.info("Starting Translation Service API on port 6006") + # Get default model from environment variable or use 'qwen' + default_model = os.getenv("TRANSLATION_MODEL", "qwen") try: - init_translator() - logger.info("Translation service ready") + get_translator(model=default_model) + logger.info(f"Translation service ready with default model: {default_model}") except Exception as e: logger.error(f"Failed to initialize translator: {e}", exc_info=True) logger.warning("Service will start but translation may not work correctly") @@ -178,10 +194,13 @@ async def startup_event(): async def health_check(): """Health check endpoint.""" try: - translator = init_translator() + default_model = os.getenv("TRANSLATION_MODEL", "qwen") + translator = get_translator(model=default_model) return { "status": "healthy", "service": "translation", + "default_model": default_model, + "available_models": list(_translators.keys()), "translator_initialized": translator is not None, "cache_enabled": translator.use_cache if translator else False } @@ -203,6 +222,8 @@ async def translate(request: TranslationRequest): Uses a fixed prompt optimized for product SKU name translation. The translation is cached in Redis for performance. + + Supports both Qwen (default) and DeepL models via the 'model' parameter. """ if not request.text or not request.text.strip(): raise HTTPException( @@ -216,8 +237,17 @@ async def translate(request: TranslationRequest): detail="target_lang is required" ) + # Validate model parameter + model = request.model.lower() if request.model else "qwen" + if model not in ['qwen', 'deepl']: + raise HTTPException( + status_code=400, + detail=f"Invalid model: {model}. 
Supported models: 'qwen', 'deepl'" + ) + try: - translator = init_translator() + # Get translator instance for the specified model + translator = get_translator(model=model) # Translate using the fixed prompt translated_text = translator.translate( @@ -238,7 +268,8 @@ async def translate(request: TranslationRequest): target_lang=request.target_lang, source_lang=request.source_lang, translated_text=translated_text, - status="success" + status="success", + model=translator.model ) except HTTPException: diff --git a/config/env_config.py b/config/env_config.py index 837201d..c15e875 100644 --- a/config/env_config.py +++ b/config/env_config.py @@ -39,6 +39,9 @@ REDIS_CONFIG = { # DeepL API Key DEEPL_AUTH_KEY = os.getenv('DEEPL_AUTH_KEY') +# DashScope API Key (for Qwen models) +DASHSCOPE_API_KEY = os.getenv('DASHSCOPE_API_KEY') + # API Service Configuration API_HOST = os.getenv('API_HOST', '0.0.0.0') API_PORT = int(os.getenv('API_PORT', 6002)) diff --git a/docs/翻译模块说明.md b/docs/翻译模块说明.md new file mode 100644 index 0000000..dd3718d --- /dev/null +++ b/docs/翻译模块说明.md @@ -0,0 +1,256 @@ +# 翻译模块说明(Qwen / DeepL) + +本文档汇总翻译模块的**接口使用说明**与**Python 模块用法**,对应代码: + +- HTTP 服务:`api/translator_app.py` +- Python 模块:`query/translator.py` + +--- + +## 1. 功能概述 + +当前翻译模块支持两种后端: + +- **Qwen(默认)**:通过阿里云百炼 DashScope 的 OpenAI 兼容接口调用 `qwen-mt-flash` +- **DeepL**:通过 DeepL API 调用翻译(保留原有能力) + +两种方式均支持: + +- **Redis 缓存**(如启用):同文案同目标语言命中缓存直接返回 +- **`source_lang` 自动检测**:当 `source_lang` 为空或 `"auto"` 时启用自动检测(Qwen 使用 `"auto"`) + +--- + +## 2. 环境变量与配置 + +项目会在 `config/env_config.py` 中加载项目根目录的 `.env`,常用变量如下: + +```env +# Qwen / DashScope +DASHSCOPE_API_KEY=sk-xxx + +# DeepL +DEEPL_AUTH_KEY=xxx + +# 可选:翻译服务默认模型(HTTP 服务启动后若请求不传 model,则使用此默认值) +TRANSLATION_MODEL=qwen # 或 deepl +``` + +说明: + +- **Qwen** 使用 `DASHSCOPE_API_KEY` +- **DeepL** 使用 `DEEPL_AUTH_KEY` +- `.env` 中的 `OPENAI_API_KEY` 不是本翻译模块必须项(当前实现用的是 `DASHSCOPE_API_KEY`) + +--- + +## 3. 
HTTP 翻译服务(`api/translator_app.py`) + +### 3.1 启动命令 + +推荐(热更新): + +```bash +cd /home/tw/SearchEngine +uvicorn api.translator_app:app --host 0.0.0.0 --port 6006 --reload +``` + +指定默认模型(不传请求 `model` 时生效): + +```bash +cd /home/tw/SearchEngine +export TRANSLATION_MODEL=qwen # 或 deepl +uvicorn api.translator_app:app --host 0.0.0.0 --port 6006 --reload +``` + +### 3.2 接口列表 + +- **GET** `/health`:健康检查(返回默认模型、已初始化模型列表等) +- **POST** `/translate`:翻译文本 +- **GET** `/docs`:Swagger UI + +### 3.3 `/translate` 请求参数 + +请求体(JSON): + +```json +{ + "text": "要翻译的文本", + "target_lang": "en", + "source_lang": "auto", + "model": "qwen" +} +``` + +- **text**:必填,待翻译文本 +- **target_lang**:必填,目标语言代码(见“语言支持”) +- **source_lang**:可选,源语言代码;不传或传 `"auto"` 时自动检测 +- **model**:可选,`"qwen"` 或 `"deepl"`;默认 `"qwen"` + +### 3.4 `/translate` 返回参数 + +响应体(JSON,成功时): + +```json +{ + "text": "商品名称", + "target_lang": "en", + "source_lang": "zh", + "translated_text": "Product name", + "status": "success", + "model": "qwen" +} +``` + +### 3.5 请求示例(curl) + +健康检查: + +```bash +curl http://localhost:6006/health +``` + +默认(qwen)中文 → 英文: + +```bash +curl -X POST http://localhost:6006/translate \ + -H "Content-Type: application/json" \ + -d '{"text":"我看到这个视频后没有笑","target_lang":"en","source_lang":"auto"}' +``` + +显式指定 qwen,英文 → 简体中文: + +```bash +curl -X POST http://localhost:6006/translate \ + -H "Content-Type: application/json" \ + -d '{"text":"Product name","target_lang":"zh","source_lang":"en","model":"qwen"}' +``` + +繁体中文(`zh_tw`)测试: + +```bash +curl -X POST http://localhost:6006/translate \ + -H "Content-Type: application/json" \ + -d '{"text":"商品名稱","target_lang":"zh_tw","source_lang":"auto","model":"qwen"}' +``` + +切换 DeepL: + +```bash +curl -X POST http://localhost:6006/translate \ + -H "Content-Type: application/json" \ + -d '{"text":"商品名称","target_lang":"en","source_lang":"zh","model":"deepl"}' +``` + +### 3.6 关于提示词(Prompt) + +HTTP 服务内部使用了固定提示词 `TRANSLATION_PROMPT`(适用于“商品 SKU 英文名”场景),并通过 `prompt` 参数传入 
`Translator.translate()`。 + +- **DeepL**:`prompt` 会作为 DeepL 的 `context` 使用(影响翻译但不被翻译) +- **Qwen**:当前实现未将 `prompt/context` 传给 Qwen 的 `translation_options`(即对 Qwen 不生效) + +--- + +## 4. Python 翻译模块(`query/translator.py`) + +### 4.1 基本用法 + +```python +from query.translator import Translator + +# 默认使用 qwen +translator = Translator() + +result = translator.translate( + text="我看到这个视频后没有笑", + target_lang="en", + source_lang="auto", +) +print(result) +``` + +显式选择模型: + +```python +translator_qwen = Translator(model="qwen") +translator_deepl = Translator(model="deepl") +``` + +### 4.2 关键参数 + +- `Translator(model="qwen" | "deepl")`:选择翻译模型,默认 `"qwen"` +- `translate(text, target_lang, source_lang=None, context=None, prompt=None)`: + - `target_lang` / `source_lang`:语言代码(见“语言支持”) + - `source_lang` 为空或 `"auto"`:自动检测 + - `prompt`: + - DeepL:作为 `context` 使用 + - Qwen:当前未使用 + +### 4.3 缓存(Redis) + +`Translator(use_cache=True)` 时会连接 Redis 并缓存翻译结果。 + +- Redis 连接配置来自 `config/env_config.py` 的 `REDIS_CONFIG` +- 缓存 key 前缀默认 `trans`(可用 `REDIS_TRANSLATION_CACHE_PREFIX` 覆盖) + +--- + +## 5. Qwen 语言支持(按 qwen-mt-plus/flash/turbo 标准) + +> 以下为 Qwen 翻译模型支持的语言(**代码 → 英文名**),并已用于 `query/translator.py` 的映射。 + +| 代码 | 英文名 | +|------|--------| +| en | English | +| zh | Chinese | +| zh_tw | Traditional Chinese | +| ru | Russian | +| ja | Japanese | +| ko | Korean | +| es | Spanish | +| fr | French | +| pt | Portuguese | +| de | German | +| it | Italian | +| th | Thai | +| vi | Vietnamese | +| id | Indonesian | +| ms | Malay | +| ar | Arabic | +| hi | Hindi | +| he | Hebrew | +| my | Burmese | +| ta | Tamil | +| ur | Urdu | +| bn | Bengali | +| pl | Polish | +| nl | Dutch | +| ro | Romanian | +| tr | Turkish | +| km | Khmer | +| lo | Lao | +| yue | Cantonese | +| cs | Czech | +| el | Greek | +| sv | Swedish | +| hu | Hungarian | +| da | Danish | +| fi | Finnish | +| uk | Ukrainian | +| bg | Bulgarian | + +--- + +## 6. 
常见问题(FAQ) + +### 6.1 Qwen 调用报错 / 无法初始化 + +- 确认 `.env` 中已配置 `DASHSCOPE_API_KEY` +- 确认安装依赖:`openai`(Python 包) +- 如在海外地域使用模型,将 `base_url` 切换为 `https://dashscope-intl.aliyuncs.com/compatible-mode/v1` + +### 6.2 DeepL 返回 403 / 翻译失败 + +- 确认 `.env` 中已配置 `DEEPL_AUTH_KEY` +- 若使用的是 Pro key,请使用 `https://api.deepl.com/v2/translate`(当前代码即为该地址) + diff --git a/query/translator.py b/query/translator.py index 7db2555..92d7af2 100644 --- a/query/translator.py +++ b/query/translator.py @@ -1,16 +1,43 @@ """ Translation service for multi-language query support. -Supports DeepL API for high-quality translations. +Supports multiple translation models: +- Qwen (default): Alibaba Cloud DashScope API using qwen-mt-flash model +- DeepL: DeepL API for high-quality translations +使用方法 (Usage): -#### 官方文档: -https://developers.deepl.com/api-reference/translate/request-translation -##### +```python +from query.translator import Translator + +# 使用默认的 qwen 模型(推荐) +translator = Translator() # 默认使用 qwen 模型 + +# 或显式指定模型 +translator = Translator(model='qwen') # 使用 qwen 模型 +translator = Translator(model='deepl') # 使用 DeepL 模型 + +# 翻译文本 +result = translator.translate( + text="我看到这个视频后没有笑", + target_lang="en", + source_lang="auto" # 自动检测源语言 +) +``` +配置说明 (Configuration): +- Qwen 模型需要设置 DASHSCOPE_API_KEY 环境变量(在 .env 文件中) +- DeepL 模型需要设置 DEEPL_AUTH_KEY 环境变量(在 .env 文件中) +Qwen 模型参考文档: +- 官方文档:https://help.aliyun.com/zh/model-studio/get-api-key +- 模型:qwen-mt-flash(快速翻译模型) + +DeepL 官方文档: +https://developers.deepl.com/api-reference/translate/request-translation """ +import os import requests import re import redis @@ -21,18 +48,21 @@ import logging logger = logging.getLogger(__name__) -# Try to import DEEPL_AUTH_KEY and REDIS_CONFIG, but allow import to fail -try: - from config.env_config import DEEPL_AUTH_KEY, REDIS_CONFIG -except ImportError: - DEEPL_AUTH_KEY = None - REDIS_CONFIG = {} +from config.env_config import DEEPL_AUTH_KEY, DASHSCOPE_API_KEY, REDIS_CONFIG +from openai import OpenAI class Translator: - 
"""Multi-language translator using DeepL API.""" + """ + Multi-language translator supporting Qwen and DeepL APIs. + + Default model is 'qwen' which uses Alibaba Cloud DashScope API. + """ DEEPL_API_URL = "https://api.deepl.com/v2/translate" # Pro tier + QWEN_BASE_URL = "https://dashscope.aliyuncs.com/compatible-mode/v1" # 北京地域 + # 如果使用新加坡地域的模型,需要将base_url替换为:https://dashscope-intl.aliyuncs.com/compatible-mode/v1 + QWEN_MODEL = "qwen-mt-flash" # 快速翻译模型 # Language code mapping LANG_CODE_MAP = { @@ -50,6 +80,7 @@ class Translator: def __init__( self, + model: str = "qwen", api_key: Optional[str] = None, use_cache: bool = True, timeout: int = 10, @@ -60,21 +91,40 @@ class Translator: Initialize translator. Args: - api_key: DeepL API key (or None to use from config/env) + model: Translation model to use. Options: 'qwen' (default) or 'deepl' + api_key: API key for the selected model (or None to use from config/env) use_cache: Whether to cache translations timeout: Request timeout in seconds - glossary_id: DeepL glossary ID for custom terminology (optional) + glossary_id: DeepL glossary ID for custom terminology (optional, only for DeepL) translation_context: Context hint for translation (e.g., "e-commerce", "product search") """ + self.model = model.lower() + if self.model not in ['qwen', 'deepl']: + raise ValueError(f"Unsupported model: {model}. 
Supported models: 'qwen', 'deepl'") + # Get API key from config if not provided - if api_key is None and DEEPL_AUTH_KEY: - api_key = DEEPL_AUTH_KEY + if api_key is None: + if self.model == 'qwen': + api_key = DASHSCOPE_API_KEY or os.getenv("DASHSCOPE_API_KEY") + else: # deepl + api_key = DEEPL_AUTH_KEY or os.getenv("DEEPL_AUTH_KEY") self.api_key = api_key self.timeout = timeout self.use_cache = use_cache self.glossary_id = glossary_id self.translation_context = translation_context or "e-commerce product search" + + # Initialize OpenAI client for Qwen if needed + self.qwen_client = None + if self.model == 'qwen': + if not self.api_key: + logger.warning("DASHSCOPE_API_KEY not set. Qwen translation will not work.") + else: + self.qwen_client = OpenAI( + api_key=self.api_key, + base_url=self.QWEN_BASE_URL, + ) # Initialize Redis cache if enabled if use_cache: @@ -119,7 +169,7 @@ class Translator: Args: text: Text to translate target_lang: Target language code ('zh', 'en', 'ru', etc.) - source_lang: Source language code (optional, auto-detect if None) + source_lang: Source language code (optional, auto-detect if None) context: Additional context for translation (overrides default context) prompt: Translation prompt/instruction (optional, for better translation quality) @@ -174,13 +224,17 @@ class Translator: ) return text - # Translate using DeepL (Pro endpoint only, no free fallback) + # Translate using selected model logger.info( - f"[Translator] Translation request | Original text: '{text}' | Target language: {target_lang} | " + f"[Translator] Translation request | Model: {self.model} | Original text: '{text}' | Target language: {target_lang} | " f"Source language: {source_lang or 'auto'} | Context: {translation_context} | " f"Prompt: {'yes' if prompt else 'no'} | Status: Starting translation" ) - result = self._translate_deepl(text, target_lang, source_lang, translation_context, prompt) + + if self.model == 'qwen': + result = self._translate_qwen(text, target_lang, 
source_lang, translation_context, prompt) + else: # deepl + result = self._translate_deepl(text, target_lang, source_lang, translation_context, prompt) # If still failed, return original text with warning if result is None: @@ -201,6 +255,122 @@ class Translator: return result + def _translate_qwen( + self, + text: str, + target_lang: str, + source_lang: Optional[str], + context: Optional[str] = None, + prompt: Optional[str] = None + ) -> Optional[str]: + """ + Translate using Qwen MT Flash model via Alibaba Cloud DashScope API. + + Args: + text: Text to translate + target_lang: Target language code ('zh', 'en', 'ru', etc.) + source_lang: Source language code (optional, 'auto' if None) + context: Context hint for translation (optional) + prompt: Translation prompt/instruction (optional) + + Returns: + Translated text or None if translation fails + """ + if not self.qwen_client: + logger.error("[Translator] Qwen client not initialized. Check DASHSCOPE_API_KEY.") + return None + + # Qwen (qwen-mt-plus/flash/turbo) supported languages mapping + # 标准来自:你提供的“语言 / 英文名 / 代码”表 + qwen_lang_map = { + "en": "English", + "zh": "Chinese", + "zh_tw": "Traditional Chinese", + "ru": "Russian", + "ja": "Japanese", + "ko": "Korean", + "es": "Spanish", + "fr": "French", + "pt": "Portuguese", + "de": "German", + "it": "Italian", + "th": "Thai", + "vi": "Vietnamese", + "id": "Indonesian", + "ms": "Malay", + "ar": "Arabic", + "hi": "Hindi", + "he": "Hebrew", + "my": "Burmese", + "ta": "Tamil", + "ur": "Urdu", + "bn": "Bengali", + "pl": "Polish", + "nl": "Dutch", + "ro": "Romanian", + "tr": "Turkish", + "km": "Khmer", + "lo": "Lao", + "yue": "Cantonese", + "cs": "Czech", + "el": "Greek", + "sv": "Swedish", + "hu": "Hungarian", + "da": "Danish", + "fi": "Finnish", + "uk": "Ukrainian", + "bg": "Bulgarian", + } + + # Convert target language + target_lang_normalized = target_lang.lower() + target_lang_qwen = qwen_lang_map.get(target_lang_normalized, target_lang.capitalize()) + + # Convert 
source language + source_lang_normalized = (source_lang or "").strip().lower() + if not source_lang_normalized or source_lang_normalized == "auto": + source_lang_qwen = "auto" + else: + source_lang_qwen = qwen_lang_map.get(source_lang_normalized, source_lang.capitalize()) + + # Prepare translation options + translation_options = { + "source_lang": source_lang_qwen, + "target_lang": target_lang_qwen, + } + + # Prepare messages + messages = [ + { + "role": "user", + "content": text + } + ] + + try: + completion = self.qwen_client.chat.completions.create( + model=self.QWEN_MODEL, + messages=messages, + extra_body={ + "translation_options": translation_options + } + ) + + translated_text = completion.choices[0].message.content.strip() + + logger.debug( + f"[Translator] Qwen API response success | Original text: '{text}' | Target language: {target_lang_qwen} | " + f"Translation result: '{translated_text}'" + ) + return translated_text + + except Exception as e: + logger.error( + f"[Translator] Qwen API request exception | Original text: '{text}' | Target language: {target_lang_qwen} | " + f"Error: {e}", exc_info=True + ) + return None + def _translate_deepl( self, text: str, -- libgit2 0.21.2