# text_encoder.py
"""
Text embedding encoder using network service.

Generates embeddings via HTTP API service (default localhost:6005).
"""

import sys
import requests
import time
import threading
import numpy as np
import pickle
import redis
import os
from datetime import timedelta
from typing import List, Union, Dict, Any, Optional
import logging

logger = logging.getLogger(__name__)

# Try to import REDIS_CONFIG; fall back to an empty dict so this module can
# still be imported without the config package (Redis settings then use the
# built-in defaults below, and cache init may simply fail and be skipped).
try:
    from config.env_config import REDIS_CONFIG
except ImportError:
    REDIS_CONFIG = {}


class BgeEncoder:
    """
    Singleton text encoder backed by an HTTP embedding service.

    Thread-safe singleton: a class-level lock guards instance creation, so at
    most one encoder (and one Redis cache connection) exists per process.

    Note: arguments passed on any instantiation after the first are ignored,
    because the cached singleton instance is returned as-is.
    """
    _instance = None
    _lock = threading.Lock()

    def __new__(cls, service_url: Optional[str] = None):
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(BgeEncoder, cls).__new__(cls)
                # Resolution order: explicit argument > env var > localhost default.
                resolved_url = service_url or os.getenv("EMBEDDING_SERVICE_URL", "http://localhost:6005")
                logger.info(f"Creating BgeEncoder instance with service URL: {resolved_url}")
                cls._instance.service_url = resolved_url
                cls._instance.endpoint = f"{resolved_url}/embed/text"

                # Initialize the optional Redis cache. On any failure the
                # encoder degrades gracefully to uncached operation.
                try:
                    cls._instance.redis_client = redis.Redis(
                        host=REDIS_CONFIG.get('host', 'localhost'),
                        port=REDIS_CONFIG.get('port', 6479),
                        password=REDIS_CONFIG.get('password'),
                        decode_responses=False,  # embeddings are stored as pickled binary blobs
                        socket_timeout=REDIS_CONFIG.get('socket_timeout', 1),
                        socket_connect_timeout=REDIS_CONFIG.get('socket_connect_timeout', 1),
                        retry_on_timeout=REDIS_CONFIG.get('retry_on_timeout', False),
                        health_check_interval=10  # avoid reusing broken connections
                    )
                    # Fail fast here if the server is unreachable, so the
                    # except branch can disable caching up front.
                    cls._instance.redis_client.ping()
                    cls._instance.expire_time = timedelta(days=REDIS_CONFIG.get('cache_expire_days', 180))
                    logger.info("Redis cache initialized for embeddings")
                except Exception as e:
                    logger.warning(f"Failed to initialize Redis cache for embeddings: {e}, continuing without cache")
                    cls._instance.redis_client = None
        return cls._instance

    def _call_service(self, request_data: List[str]) -> List[Any]:
        """
        Call the embedding service API.

        Args:
            request_data: List of texts

        Returns:
            List of embeddings (list[float]) or nulls (None), aligned to input order

        Raises:
            requests.exceptions.RequestException: on connection failure,
                timeout, or a non-2xx HTTP status.
        """
        try:
            response = requests.post(
                self.endpoint,
                json=request_data,
                timeout=60
            )
            response.raise_for_status()
            return response.json()
        except requests.exceptions.RequestException as e:
            logger.error(f"BgeEncoder service request failed: {e}", exc_info=True)
            raise

    def encode(
        self,
        sentences: Union[str, List[str]],
        normalize_embeddings: bool = True,
        device: str = 'cpu',
        batch_size: int = 32
    ) -> np.ndarray:
        """
        Encode text into embeddings via the network service, with Redis caching.

        Args:
            sentences: Single string or list of strings to encode
            normalize_embeddings: Accepted for API compatibility; not used here
                (normalization, if any, is the service's responsibility)
            device: Accepted for API compatibility; ignored
            batch_size: Accepted for API compatibility; ignored — all uncached
                texts are sent to the service in a single request

        Returns:
            1-D numpy array of dtype=object with one slot per input text;
            each element is either an np.ndarray embedding vector or None
            (no embedding available for that text).
        """
        # Normalize a single string to a one-element list.
        if isinstance(sentences, str):
            sentences = [sentences]

        # One slot per input; None means "no embedding available".
        embeddings: List[Optional[np.ndarray]] = [None] * len(sentences)

        # Serve what we can from the cache; collect the rest for the service.
        uncached_indices: List[int] = []
        uncached_texts: List[str] = []
        for i, text in enumerate(sentences):
            # 'en' is used as the fixed cache-key language for title embeddings.
            cached = self._get_cached_embedding(text, 'en')
            if cached is not None:
                embeddings[i] = cached
            else:
                uncached_indices.append(i)
                uncached_texts.append(text)

        if uncached_texts:
            try:
                response_data = self._call_service(uncached_texts)

                for i, text in enumerate(uncached_texts):
                    original_idx = uncached_indices[i]
                    # Guard against a short or empty response: missing -> None.
                    if response_data and i < len(response_data):
                        embedding = response_data[i]
                    else:
                        embedding = None

                    if embedding is None:
                        logger.warning(f"No embedding found for text {original_idx}: {text[:50]}...")
                        continue  # slot stays None

                    embedding_array = np.array(embedding, dtype=np.float32)
                    # Validate embedding from service - if invalid, treat as no result
                    if self._is_valid_embedding(embedding_array):
                        embeddings[original_idx] = embedding_array
                        # Cache the embedding (best-effort)
                        self._set_cached_embedding(text, 'en', embedding_array)
                    else:
                        logger.warning(
                            f"Invalid embedding returned from service for text {original_idx} "
                            f"(contains NaN/Inf or invalid shape), treating as no result. "
                            f"Text preview: {text[:50]}..."
                        )

            except Exception as e:
                # On failure, leave the affected slots as None rather than
                # fabricating fallback zero vectors.
                logger.error(f"Failed to encode texts: {e}", exc_info=True)

        # Build the object array explicitly: np.array(list_of_equal_length_vectors,
        # dtype=object) would silently produce a 2-D array instead of the
        # documented 1-D array of per-text vectors/None.
        result = np.empty(len(embeddings), dtype=object)
        for i, emb in enumerate(embeddings):
            result[i] = emb
        return result

    def encode_batch(
        self,
        texts: List[str],
        batch_size: int = 32,
        device: str = 'cpu'
    ) -> np.ndarray:
        """
        Encode a batch of texts via the network service.

        Thin wrapper around :meth:`encode`; see it for parameter semantics.

        Args:
            texts: List of texts to encode
            batch_size: Accepted for API compatibility; ignored
            device: Accepted for API compatibility; ignored

        Returns:
            1-D numpy object array of embeddings (np.ndarray or None per text)
        """
        return self.encode(texts, batch_size=batch_size, device=device)

    def _get_cache_key(self, query: str, language: str) -> str:
        """Generate a cache key for the query"""
        return f"embedding:{language}:{query}"

    def _is_valid_embedding(self, embedding: Optional[np.ndarray]) -> bool:
        """
        Check if embedding is valid (not None, non-empty ndarray, no NaN/Inf).

        Args:
            embedding: Embedding array to validate

        Returns:
            True if valid, False otherwise
        """
        if embedding is None:
            return False
        if not isinstance(embedding, np.ndarray):
            return False
        if embedding.size == 0:
            return False
        # Check for NaN or Inf values
        if not np.isfinite(embedding).all():
            return False
        return True

    def _get_cached_embedding(self, query: str, language: str) -> Optional[np.ndarray]:
        """Get embedding from cache if it exists (with sliding expiration)."""
        if not self.redis_client:
            return None

        try:
            cache_key = self._get_cache_key(query, language)
            cached_data = self.redis_client.get(cache_key)
            if cached_data:
                # NOTE: pickle on cache data is only safe because this Redis
                # instance is written exclusively by _set_cached_embedding;
                # do not point it at an untrusted store.
                embedding = pickle.loads(cached_data)
                # Validate cached embedding - if invalid, ignore cache and return None
                if self._is_valid_embedding(embedding):
                    logger.debug(f"Cache hit for embedding: {query}")
                    # Update expiration time on access (sliding expiration)
                    self.redis_client.expire(cache_key, self.expire_time)
                    return embedding
                else:
                    logger.warning(
                        f"Invalid embedding found in cache (contains NaN/Inf or invalid shape), "
                        f"ignoring cache for query: {query[:50]}..."
                    )
                    # Delete invalid cache entry (best-effort)
                    try:
                        self.redis_client.delete(cache_key)
                    except Exception as e:
                        logger.debug(f"Failed to delete invalid cache entry: {e}")
                    return None
            return None
        except Exception as e:
            logger.error(f"Error retrieving embedding from cache: {e}")
            return None

    def _set_cached_embedding(self, query: str, language: str, embedding: np.ndarray) -> bool:
        """Store embedding in cache. Returns True on success, False otherwise."""
        if not self.redis_client:
            return False

        try:
            cache_key = self._get_cache_key(query, language)
            serialized_data = pickle.dumps(embedding)
            self.redis_client.setex(
                cache_key,
                self.expire_time,
                serialized_data
            )
            logger.debug(f"Successfully cached embedding for query: {query}")
            return True
        except redis.exceptions.RedisError as e:
            # RedisError is the base of BusyLoadingError/ConnectionError/
            # TimeoutError, so one clause covers all redis-level failures.
            logger.warning(f"Redis error storing embedding in cache: {e}")
            return False
        except Exception as e:
            logger.error(f"Error storing embedding in cache: {e}")
            return False