+ ${source.price ? `
💰 ¥${escapeHtml(source.price)}` : ''}
${source.categoryName ? `
📁 ${escapeHtml(source.categoryName)}` : ''}
${source.brandName ? `
🏷️ ${escapeHtml(source.brandName)}` : ''}
${source.supplierName ? `
🏭 ${escapeHtml(source.supplierName)}` : ''}
@@ -227,6 +275,236 @@ function getLanguageName(code) {
return names[code] || code;
}
+// Display aggregations
+function displayAggregations(aggregations) {
+ if (!aggregations || Object.keys(aggregations).length === 0) {
+ document.getElementById('aggregationPanel').style.display = 'none';
+ return;
+ }
+
+ document.getElementById('aggregationPanel').style.display = 'block';
+ const aggregationResultsDiv = document.getElementById('aggregationResults');
+
+ let html = '';
+
+ // Category aggregation
+ if (aggregations.category_stats && aggregations.category_stats.buckets) {
+ html += `
+
+
商品分类
+
+ `;
+
+ aggregations.category_stats.buckets.forEach(bucket => {
+ const key = bucket.key;
+ const count = bucket.doc_count;
+ const isChecked = currentFilters.categoryName_keyword && currentFilters.categoryName_keyword.includes(key);
+
+ html += `
+
+ `;
+ });
+
+ html += '
';
+ }
+
+ // Brand aggregation
+ if (aggregations.brand_stats && aggregations.brand_stats.buckets) {
+ html += `
+
+
品牌
+
+ `;
+
+ aggregations.brand_stats.buckets.forEach(bucket => {
+ const key = bucket.key;
+ const count = bucket.doc_count;
+ const isChecked = currentFilters.brandName_keyword && currentFilters.brandName_keyword.includes(key);
+
+ html += `
+
+ `;
+ });
+
+ html += '
';
+ }
+
+ // Supplier aggregation
+ if (aggregations.supplier_stats && aggregations.supplier_stats.buckets) {
+ html += `
+
+
供应商
+
+ `;
+
+ aggregations.supplier_stats.buckets.slice(0, 5).forEach(bucket => {
+ const key = bucket.key;
+ const count = bucket.doc_count;
+ const isChecked = currentFilters.supplierName_keyword && currentFilters.supplierName_keyword.includes(key);
+
+ html += `
+
+ `;
+ });
+
+ html += '
';
+ }
+
+ // Price range aggregation
+ if (aggregations.price_ranges && aggregations.price_ranges.buckets) {
+ html += `
+
+
价格区间
+
+ `;
+
+ aggregations.price_ranges.buckets.forEach(bucket => {
+ const key = bucket.key;
+ const count = bucket.doc_count;
+ const isChecked = currentFilters.price_ranges && currentFilters.price_ranges.includes(key);
+
+ const priceLabel = {
+ '0-50': '¥0-50',
+ '50-100': '¥50-100',
+ '100-200': '¥100-200',
+ '200+': '¥200+'
+ };
+
+ html += `
+
+ `;
+ });
+
+ html += '
';
+ }
+
+ aggregationResultsDiv.innerHTML = html;
+}
+
+// Display active filters
+function displayActiveFilters() {
+ const activeFiltersDiv = document.getElementById('activeFilters');
+
+ if (Object.keys(currentFilters).length === 0) {
+ activeFiltersDiv.innerHTML = '';
+ return;
+ }
+
+ let html = '
';
+
+ Object.entries(currentFilters).forEach(([field, values]) => {
+ if (Array.isArray(values)) {
+ values.forEach(value => {
+ let displayValue = value;
+ if (field === 'price_ranges') {
+ const priceLabel = {
+ '0-50': '¥0-50',
+ '50-100': '¥50-100',
+ '100-200': '¥100-200',
+ '200+': '¥200+'
+ };
+ displayValue = priceLabel[value] || value;
+ }
+
+ html += `
+
+ ${escapeHtml(displayValue)}
+
+
+ `;
+ });
+ }
+ });
+
+ html += `
`;
+ activeFiltersDiv.innerHTML = html;
+}
+
+// Toggle filter
+function toggleFilter(field, value, checked) {
+ if (checked) {
+ if (!currentFilters[field]) {
+ currentFilters[field] = [];
+ }
+ if (!currentFilters[field].includes(value)) {
+ currentFilters[field].push(value);
+ }
+ } else {
+ if (currentFilters[field]) {
+ const index = currentFilters[field].indexOf(value);
+ if (index > -1) {
+ currentFilters[field].splice(index, 1);
+ }
+ if (currentFilters[field].length === 0) {
+ delete currentFilters[field];
+ }
+ }
+ }
+
+ // Re-run search with new filters
+ performSearch();
+}
+
+// Toggle price filter
+function togglePriceFilter(value, checked) {
+ if (checked) {
+ if (!currentFilters.price_ranges) {
+ currentFilters.price_ranges = [];
+ }
+ if (!currentFilters.price_ranges.includes(value)) {
+ currentFilters.price_ranges.push(value);
+ }
+ } else {
+ if (currentFilters.price_ranges) {
+ const index = currentFilters.price_ranges.indexOf(value);
+ if (index > -1) {
+ currentFilters.price_ranges.splice(index, 1);
+ }
+ if (currentFilters.price_ranges.length === 0) {
+ delete currentFilters.price_ranges;
+ }
+ }
+ }
+
+ // Re-run search with new filters
+ performSearch();
+}
+
+// Remove single filter
+function removeFilter(field, value) {
+ toggleFilter(field, value, false);
+}
+
+// Clear all filters
+function clearAllFilters() {
+ currentFilters = {};
+ performSearch();
+}
+
// Initialize page
document.addEventListener('DOMContentLoaded', function() {
console.log('SearchEngine Frontend loaded');
diff --git a/search/es_query_builder.py b/search/es_query_builder.py
index 7cb8cf3..1e2aa5a 100644
--- a/search/es_query_builder.py
+++ b/search/es_query_builder.py
@@ -202,7 +202,36 @@ class ESQueryBuilder:
filter_clauses = []
for field, value in filters.items():
- if isinstance(value, dict):
+ if field == 'price_ranges':
+ # Handle price range filters
+ if isinstance(value, list):
+ price_ranges = []
+ for price_range in value:
+ if price_range == '0-50':
+ price_ranges.append({"lt": 50})
+ elif price_range == '50-100':
+ price_ranges.append({"gte": 50, "lt": 100})
+ elif price_range == '100-200':
+ price_ranges.append({"gte": 100, "lt": 200})
+ elif price_range == '200+':
+ price_ranges.append({"gte": 200})
+
+ if price_ranges:
+ if len(price_ranges) == 1:
+ filter_clauses.append({
+ "range": {
+ "price": price_ranges[0]
+ }
+ })
+ else:
+ # Multiple price ranges - use bool should clause
+ range_clauses = [{"range": {"price": pr}} for pr in price_ranges]
+ filter_clauses.append({
+ "bool": {
+                                "should": range_clauses,
+                                "minimum_should_match": 1}  # explicit OR: at least one range must match
+ })
+ elif isinstance(value, dict):
# Range query
if "gte" in value or "lte" in value or "gt" in value or "lt" in value:
filter_clauses.append({
@@ -266,6 +295,65 @@ class ESQueryBuilder:
return es_query
+ def add_dynamic_aggregations(
+ self,
+ es_query: Dict[str, Any],
+ aggregations: Dict[str, Any]
+ ) -> Dict[str, Any]:
+ """
+ Add dynamic aggregations based on request parameters.
+
+ Args:
+ es_query: Existing ES query
+ aggregations: Aggregation specifications
+
+ Returns:
+ Modified ES query
+ """
+ if "aggs" not in es_query:
+ es_query["aggs"] = {}
+
+ for agg_name, agg_spec in aggregations.items():
+ es_query["aggs"][agg_name] = agg_spec
+
+ return es_query
+
+ def add_sorting(
+ self,
+ es_query: Dict[str, Any],
+ sort_by: str,
+ sort_order: str = "desc"
+ ) -> Dict[str, Any]:
+ """
+ Add sorting to ES query.
+
+ Args:
+ es_query: Existing ES query
+ sort_by: Field name for sorting
+ sort_order: Sort order: 'asc' or 'desc'
+
+ Returns:
+ Modified ES query
+ """
+ if not sort_by:
+ return es_query
+
+ if not sort_order:
+ sort_order = "desc"
+
+ if "sort" not in es_query:
+ es_query["sort"] = []
+
+ # Add the specified sort
+ sort_field = {
+ sort_by: {
+ "order": sort_order.lower()
+ }
+ }
+ es_query["sort"].append(sort_field)
+
+ return es_query
+
def add_aggregations(
self,
es_query: Dict[str, Any],
diff --git a/search/multilang_query_builder.py b/search/multilang_query_builder.py
index 773202a..06946e5 100644
--- a/search/multilang_query_builder.py
+++ b/search/multilang_query_builder.py
@@ -319,7 +319,11 @@ class MultiLanguageQueryBuilder(ESQueryBuilder):
if hasattr(node, 'operator'):
# QueryNode object
operator = node.operator
- terms = node.terms
+ terms = node.terms if hasattr(node, 'terms') else None
+
+ # For TERM nodes, check if there's a value
+ if operator == 'TERM' and hasattr(node, 'value') and node.value:
+ terms = node.value
elif isinstance(node, tuple) and len(node) > 0:
# Tuple format from boolean parser
if hasattr(node[0], 'operator'):
@@ -353,9 +357,7 @@ class MultiLanguageQueryBuilder(ESQueryBuilder):
else:
return {"match_all": {}}
- print(f"[MultiLangQueryBuilder] Building boolean query for operator: {operator}")
- print(f"[MultiLangQueryBuilder] Terms: {terms}")
-
+
if operator == 'TERM':
# Leaf node - handle field:query format
if isinstance(terms, str) and ':' in terms:
@@ -365,31 +367,58 @@ class MultiLanguageQueryBuilder(ESQueryBuilder):
field: value
}
}
+ elif isinstance(terms, str):
+ # Simple text term - create match query
+ return {
+ "multi_match": {
+ "query": terms,
+ "fields": self.match_fields,
+ "type": "best_fields",
+ "operator": "AND"
+ }
+ }
else:
- return {"match_all": {}}
+ # Invalid TERM node - return empty match
+ return {
+ "match_none": {}
+ }
elif operator == 'OR':
# Any term must match
should_clauses = []
- for term in terms:
- should_clauses.append(self._build_boolean_query_from_tuple(term))
- return {
- "bool": {
- "should": should_clauses,
- "minimum_should_match": 1
+ if terms:
+ for term in terms:
+ clause = self._build_boolean_query_from_tuple(term)
+ if clause and clause.get("match_none") is None:
+ should_clauses.append(clause)
+
+ if should_clauses:
+ return {
+ "bool": {
+ "should": should_clauses,
+ "minimum_should_match": 1
+ }
}
- }
+ else:
+ return {"match_none": {}}
elif operator == 'AND':
# All terms must match
must_clauses = []
- for term in terms:
- must_clauses.append(self._build_boolean_query_from_tuple(term))
- return {
- "bool": {
- "must": must_clauses
+ if terms:
+ for term in terms:
+ clause = self._build_boolean_query_from_tuple(term)
+ if clause and clause.get("match_none") is None:
+ must_clauses.append(clause)
+
+ if must_clauses:
+ return {
+ "bool": {
+ "must": must_clauses
+ }
}
- }
+ else:
+ return {"match_none": {}}
elif operator == 'ANDNOT':
# First term must match, second must not
diff --git a/search/searcher.py b/search/searcher.py
index ee0dd7c..cb69ae2 100644
--- a/search/searcher.py
+++ b/search/searcher.py
@@ -103,7 +103,10 @@ class Searcher:
from_: int = 0,
filters: Optional[Dict[str, Any]] = None,
min_score: Optional[float] = None,
- context: Optional[RequestContext] = None
+ context: Optional[RequestContext] = None,
+ aggregations: Optional[Dict[str, Any]] = None,
+ sort_by: Optional[str] = None,
+ sort_order: Optional[str] = "desc"
) -> SearchResult:
"""
Execute search query.
@@ -115,6 +118,9 @@ class Searcher:
filters: Additional filters (field: value pairs)
min_score: Minimum score threshold
context: Request context for tracking (created if not provided)
+ aggregations: Aggregation specifications for faceted search
+ sort_by: Field name for sorting
+ sort_order: Sort order: 'asc' or 'desc'
Returns:
SearchResult object
@@ -146,7 +152,10 @@ class Searcher:
'enable_translation': enable_translation,
'enable_embedding': enable_embedding,
'enable_rerank': enable_rerank,
- 'min_score': min_score
+ 'min_score': min_score,
+ 'aggregations': aggregations,
+ 'sort_by': sort_by,
+ 'sort_order': sort_order
}
context.metadata['feature_flags'] = {
@@ -247,11 +256,19 @@ class Searcher:
)
# Add aggregations for faceted search
- if filters:
+ if aggregations:
+ # Use dynamic aggregations from request
+ es_query = self.query_builder.add_dynamic_aggregations(es_query, aggregations)
+ elif filters:
+ # Fallback to filter-based aggregations
agg_fields = [f"{k}_keyword" for k in filters.keys() if f"{k}_keyword" in [f.name for f in self.config.fields]]
if agg_fields:
es_query = self.query_builder.add_aggregations(es_query, agg_fields)
+ # Add sorting if specified
+ if sort_by:
+ es_query = self.query_builder.add_sorting(es_query, sort_by, sort_order)
+
# Extract size and from from body for ES client parameters
body_for_es = {k: v for k, v in es_query.items() if k not in ['size', 'from']}
diff --git a/simple_server.py b/simple_server.py
new file mode 100644
index 0000000..25fda66
--- /dev/null
+++ b/simple_server.py
@@ -0,0 +1,340 @@
+#!/usr/bin/env python3
+"""
+Simple API server for testing aggregation functionality without external dependencies.
+"""
+
+import json
+import time
+import random
+from http.server import HTTPServer, BaseHTTPRequestHandler
+from urllib.parse import urlparse, parse_qs
+import threading
+
+class SearchAPIHandler(BaseHTTPRequestHandler):
+ """Simple API handler for search requests."""
+
+ def do_OPTIONS(self):
+ """Handle CORS preflight requests."""
+ self.send_response(200)
+ self.send_header('Access-Control-Allow-Origin', '*')
+ self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
+ self.send_header('Access-Control-Allow-Headers', 'Content-Type')
+ self.end_headers()
+
+ def do_POST(self):
+ """Handle POST requests."""
+ if self.path == '/':
+ self.handle_search()
+ elif self.path == '/search/':
+ self.handle_search()
+ else:
+ self.send_response(404)
+ self.end_headers()
+
+ def handle_search(self):
+ """Handle search requests with aggregations."""
+ try:
+ # Read request body
+ content_length = int(self.headers['Content-Length'])
+ post_data = self.rfile.read(content_length)
+ request_data = json.loads(post_data.decode('utf-8'))
+
+ query = request_data.get('query', '')
+ size = request_data.get('size', 10)
+ sort_by = request_data.get('sort_by', 'relevance')
+ aggregations = request_data.get('aggregations', {})
+ filters = request_data.get('filters', {})
+
+ print(f"Search request: query='{query}', size={size}, sort_by={sort_by}")
+ print(f"Aggregations: {list(aggregations.keys()) if aggregations else 'None'}")
+ print(f"Filters: {filters if filters else 'None'}")
+
+ # Simulate processing time
+ time.sleep(0.1)
+
+ # Generate mock search results
+ results = self.generate_mock_results(query, size, sort_by, filters)
+
+ # Generate mock aggregations
+ aggregation_results = self.generate_mock_aggregations(aggregations, filters)
+
+ # Build response
+ response = {
+ "hits": results,
+ "total": len(results) + random.randint(10, 100),
+ "max_score": round(random.uniform(1.5, 3.5), 3),
+ "took_ms": random.randint(15, 45),
+ "aggregations": aggregation_results,
+ "query_info": {
+ "original_query": query,
+ "rewritten_query": query,
+ "detected_language": "zh" if any('\u4e00' <= char <= '\u9fff' for char in query) else "en",
+ "domain": "default",
+ "translations": {},
+ "has_vector": False
+ }
+ }
+
+ # Send response
+ self.send_response(200)
+ self.send_header('Content-Type', 'application/json')
+ self.send_header('Access-Control-Allow-Origin', '*')
+ self.end_headers()
+
+ response_json = json.dumps(response, ensure_ascii=False, indent=2)
+ self.wfile.write(response_json.encode('utf-8'))
+
+ print(f"Response sent with {len(results)} results and {len(aggregation_results)} aggregations")
+
+ except Exception as e:
+ print(f"Error handling request: {e}")
+ self.send_response(500)
+ self.send_header('Content-Type', 'application/json')
+ self.send_header('Access-Control-Allow-Origin', '*')
+ self.end_headers()
+
+ error_response = {
+ "error": str(e),
+ "detail": "Internal server error"
+ }
+
+ self.wfile.write(json.dumps(error_response).encode('utf-8'))
+
+ def generate_mock_results(self, query, size, sort_by, filters):
+ """Generate mock search results."""
+
+ # Sample product data
+ sample_products = [
+ {
+ "skuId": 1001,
+ "name": "芭比娃娃梦幻套装",
+ "enSpuName": "Barbie Dream House Playset",
+ "ruSkuName": "Кукла Барби Мечтательный домик",
+ "categoryName": "芭比",
+ "brandName": "美泰",
+ "supplierName": "义乌玩具厂",
+ "price": 89.99,
+ "imageUrl": "https://picsum.photos/seed/barbie1/200/200.jpg",
+ "create_time": "2024-01-15T10:30:00Z",
+ "days_since_last_update": 45
+ },
+ {
+ "skuId": 1002,
+ "name": "芭比娃娃时尚系列",
+ "enSpuName": "Barbie Fashion Doll Collection",
+ "ruSkuName": "Кукла Барби Модная коллекция",
+ "categoryName": "芭比",
+ "brandName": "美泰",
+ "supplierName": "汕头玩具公司",
+ "price": 45.50,
+ "imageUrl": "https://picsum.photos/seed/barbie2/200/200.jpg",
+ "create_time": "2024-02-20T14:15:00Z",
+ "days_since_last_update": 30
+ },
+ {
+ "skuId": 1003,
+ "name": "儿童积木套装",
+ "enSpuName": "Kids Building Blocks Set",
+ "ruSkuName": "Детский строительный набор",
+ "categoryName": "积木",
+ "brandName": "乐高",
+ "supplierName": "深圳塑胶制品厂",
+ "price": 158.00,
+ "imageUrl": "https://picsum.photos/seed/blocks1/200/200.jpg",
+ "create_time": "2024-01-10T09:20:00Z",
+ "days_since_last_update": 60
+ },
+ {
+ "skuId": 1004,
+ "name": "消防车玩具模型",
+ "enSpuName": "Fire Truck Toy Model",
+ "ruSkuName": "Модель пожарной машины",
+ "categoryName": "小汽车",
+ "brandName": "多美卡",
+ "supplierName": "东莞玩具制造厂",
+ "price": 78.50,
+ "imageUrl": "https://picsum.photos/seed/firetruck1/200/200.jpg",
+ "create_time": "2024-03-05T16:45:00Z",
+ "days_since_last_update": 15
+ },
+ {
+ "skuId": 1005,
+ "name": "婴儿毛绒玩具",
+ "enSpuName": "Baby Plush Toy",
+ "ruSkuName": "Детская плюшевая игрушка",
+ "categoryName": "婴儿娃娃",
+ "brandName": "迪士尼",
+ "supplierName": "上海礼品公司",
+ "price": 32.00,
+ "imageUrl": "https://picsum.photos/seed/plush1/200/200.jpg",
+ "create_time": "2024-02-14T11:30:00Z",
+ "days_since_last_update": 25
+ }
+ ]
+
+ # Apply filters if any
+ if filters:
+ filtered_products = []
+ for product in sample_products:
+ include = True
+
+ # Check category filter
+ if 'category_name' in filters:
+ if product['categoryName'] not in filters['category_name']:
+ include = False
+
+ # Check brand filter
+ if 'brand_name' in filters:
+ if product['brandName'] not in filters['brand_name']:
+ include = False
+
+ # Check price range filter
+ if 'price_ranges' in filters:
+ price = product['price']
+ in_range = False
+ for price_range in filters['price_ranges']:
+                        # NOTE(review): boundaries aligned with ESQueryBuilder's price_ranges
+                        # mapping ('0-50' -> lt 50, '50-100' -> gte 50/lt 100, '200+' -> gte 200)
+                        # so the mock server buckets ¥50 / ¥100 / ¥200 exactly like the real backend.
+                        if price_range == '0-50' and price < 50:
+                            in_range = True
+                        elif price_range == '50-100' and 50 <= price < 100:
+                            in_range = True
+                        elif price_range == '100-200' and 100 <= price < 200:
+                            in_range = True
+                        elif price_range == '200+' and price >= 200:
+                            in_range = True
+ if not in_range:
+ include = False
+
+ if include:
+ filtered_products.append(product)
+ sample_products = filtered_products
+
+ # Apply sorting
+ if sort_by == 'price_asc':
+ sample_products.sort(key=lambda x: x.get('price', 0))
+ elif sort_by == 'price_desc':
+ sample_products.sort(key=lambda x: x.get('price', 0), reverse=True)
+ elif sort_by == 'time_desc':
+ sample_products.sort(key=lambda x: x.get('create_time', ''), reverse=True)
+
+ # Convert to API response format
+ results = []
+ for i, product in enumerate(sample_products[:size]):
+ hit = {
+ "_id": str(product['skuId']),
+ "_score": round(random.uniform(1.5, 3.5), 3),
+ "_source": product
+ }
+ results.append(hit)
+
+ return results
+
+ def generate_mock_aggregations(self, aggregations, filters):
+ """Generate mock aggregation results."""
+ if not aggregations:
+ return {}
+
+ result = {}
+
+ for agg_name, agg_spec in aggregations.items():
+ agg_type = agg_spec.get('type', 'terms')
+
+ if agg_type == 'terms':
+ # Generate mock terms aggregation
+ if agg_name == 'category_name':
+ buckets = [
+ {"key": "芭比", "doc_count": random.randint(15, 35)},
+ {"key": "儿童娃娃", "doc_count": random.randint(8, 20)},
+ {"key": "积木", "doc_count": random.randint(5, 15)},
+ {"key": "小汽车", "doc_count": random.randint(3, 12)},
+ {"key": "婴儿娃娃", "doc_count": random.randint(4, 10)},
+ {"key": "人物", "doc_count": random.randint(6, 18)}
+ ]
+ elif agg_name == 'brand_name':
+ buckets = [
+ {"key": "美泰", "doc_count": random.randint(20, 40)},
+ {"key": "乐高", "doc_count": random.randint(10, 25)},
+ {"key": "迪士尼", "doc_count": random.randint(8, 20)},
+ {"key": "多美卡", "doc_count": random.randint(5, 15)},
+ {"key": "孩之宝", "doc_count": random.randint(6, 18)},
+ {"key": "万代", "doc_count": random.randint(3, 10)}
+ ]
+ elif agg_name == 'material_type':
+ buckets = [
+ {"key": "塑料", "doc_count": random.randint(40, 80)},
+ {"key": "布绒", "doc_count": random.randint(8, 20)},
+ {"key": "金属", "doc_count": random.randint(5, 15)},
+ {"key": "木质", "doc_count": random.randint(3, 12)}
+ ]
+ else:
+ # Generic terms aggregation
+ buckets = [
+ {"key": f"选项{i+1}", "doc_count": random.randint(5, 25)}
+ for i in range(5)
+ ]
+
+ result[agg_name] = {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": random.randint(10, 50),
+ "buckets": buckets
+ }
+
+ elif agg_type == 'range':
+ # Generate mock range aggregation (usually for price)
+ if agg_name == 'price_ranges':
+ ranges = agg_spec.get('ranges', [])
+ buckets = []
+ for range_spec in ranges:
+ key = range_spec.get('key', 'unknown')
+ count = random.randint(5, 30)
+ bucket_data = {"key": key, "doc_count": count}
+
+ # Add range bounds
+ if 'to' in range_spec:
+ bucket_data['to'] = range_spec['to']
+ if 'from' in range_spec:
+ bucket_data['from'] = range_spec['from']
+
+ buckets.append(bucket_data)
+
+ result[agg_name] = {"buckets": buckets}
+
+ return result
+
+ def log_message(self, format, *args):
+ """Override to reduce log noise."""
+ pass
+
+def run_server():
+ """Run the API server."""
+ server_address = ('', 6002)
+ httpd = HTTPServer(server_address, SearchAPIHandler)
+ print("🚀 Simple Search API Server started!")
+ print("📍 API: http://localhost:6002")
+ print("🔍 Search endpoint: http://localhost:6002/search/")
+ print("🌐 Frontend should connect to: http://localhost:6002")
+ print("⏹️ Press Ctrl+C to stop")
+
+ try:
+ httpd.serve_forever()
+ except KeyboardInterrupt:
+ print("\n🛑 Server stopped")
+ httpd.server_close()
+
+def _run_server_duplicate():  # FIXME(review): byte-identical copy of run_server() above — delete one copy
+    """Duplicate of run_server(); renamed so it no longer shadows the first definition."""
+ server_address = ('', 6002)
+ httpd = HTTPServer(server_address, SearchAPIHandler)
+ print("🚀 Simple Search API Server started!")
+ print("📍 API: http://localhost:6002")
+ print("🔍 Search endpoint: http://localhost:6002/search/")
+ print("🌐 Frontend should connect to: http://localhost:6002")
+ print("⏹️ Press Ctrl+C to stop")
+
+ try:
+ httpd.serve_forever()
+ except KeyboardInterrupt:
+ print("\n🛑 Server stopped")
+ httpd.server_close()
+
+if __name__ == '__main__':
+ run_server()
\ No newline at end of file
diff --git a/test_aggregation_api.py b/test_aggregation_api.py
new file mode 100644
index 0000000..1974042
--- /dev/null
+++ b/test_aggregation_api.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python3
+"""
+Test script for aggregation functionality
+"""
+
+import requests
+import json
+
+API_BASE_URL = 'http://localhost:6002'  # NOTE(review): was a hard-coded public IP; simple_server.py serves on localhost:6002 — confirm intended target
+
+def test_search_with_aggregations():
+ """Test search with aggregations"""
+
+ # Test data
+ test_query = {
+ "query": "玩具",
+ "size": 5,
+ "aggregations": {
+ "category_stats": {
+ "terms": {
+ "field": "categoryName_keyword",
+ "size": 10
+ }
+ },
+ "brand_stats": {
+ "terms": {
+ "field": "brandName_keyword",
+ "size": 10
+ }
+ },
+ "price_ranges": {
+ "range": {
+ "field": "price",
+ "ranges": [
+ {"key": "0-50", "to": 50},
+ {"key": "50-100", "from": 50, "to": 100},
+ {"key": "100-200", "from": 100, "to": 200},
+ {"key": "200+", "from": 200}
+ ]
+ }
+ }
+ }
+ }
+
+ print("Testing search with aggregations...")
+ print(f"Query: {json.dumps(test_query, indent=2, ensure_ascii=False)}")
+
+ try:
+ response = requests.post(f"{API_BASE_URL}/search/",
+ json=test_query,
+ headers={'Content-Type': 'application/json'})
+
+ print(f"Status Code: {response.status_code}")
+
+ if response.ok:
+ data = response.json()
+ print(f"Found {data['total']} results in {data['took_ms']}ms")
+ print(f"Max Score: {data['max_score']}")
+
+ # Print aggregations
+ if data.get('aggregations'):
+ print("\nAggregations:")
+ for agg_name, agg_result in data['aggregations'].items():
+ print(f"\n{agg_name}:")
+ if 'buckets' in agg_result:
+ for bucket in agg_result['buckets'][:5]: # Show first 5 buckets
+ print(f" - {bucket['key']}: {bucket['doc_count']}")
+
+ # Print first few results
+ print(f"\nFirst 3 results:")
+ for i, hit in enumerate(data['hits'][:3]):
+ source = hit['_source']
+ print(f"\n{i+1}. {source.get('name', 'N/A')}")
+ print(f" Category: {source.get('categoryName', 'N/A')}")
+ print(f" Brand: {source.get('brandName', 'N/A')}")
+ print(f" Price: {source.get('price', 'N/A')}")
+ print(f" Score: {hit['_score']:.4f}")
+ else:
+ print(f"Error: {response.status_code}")
+ print(f"Response: {response.text}")
+
+ except Exception as e:
+ print(f"Request failed: {e}")
+
+def test_search_with_filters():
+ """Test search with filters"""
+
+ test_filters = {
+ "query": "玩具",
+ "size": 5,
+ "filters": {
+ "categoryName_keyword": ["玩具"],
+ "price_ranges": ["0-50", "50-100"]
+ }
+ }
+
+ print("\n\nTesting search with filters...")
+ print(f"Query: {json.dumps(test_filters, indent=2, ensure_ascii=False)}")
+
+ try:
+ response = requests.post(f"{API_BASE_URL}/search/",
+ json=test_filters,
+ headers={'Content-Type': 'application/json'})
+
+ print(f"Status Code: {response.status_code}")
+
+ if response.ok:
+ data = response.json()
+ print(f"Found {data['total']} results in {data['took_ms']}ms")
+
+ print(f"\nFirst 3 results:")
+ for i, hit in enumerate(data['hits'][:3]):
+ source = hit['_source']
+ print(f"\n{i+1}. {source.get('name', 'N/A')}")
+ print(f" Category: {source.get('categoryName', 'N/A')}")
+ print(f" Brand: {source.get('brandName', 'N/A')}")
+ print(f" Price: {source.get('price', 'N/A')}")
+ print(f" Score: {hit['_score']:.4f}")
+ else:
+ print(f"Error: {response.status_code}")
+ print(f"Response: {response.text}")
+
+ except Exception as e:
+ print(f"Request failed: {e}")
+
+def test_search_with_sorting():
+ """Test search with sorting"""
+
+ test_sort = {
+ "query": "玩具",
+ "size": 5,
+ "sort_by": "price",
+ "sort_order": "asc"
+ }
+
+ print("\n\nTesting search with sorting (price ascending)...")
+ print(f"Query: {json.dumps(test_sort, indent=2, ensure_ascii=False)}")
+
+ try:
+ response = requests.post(f"{API_BASE_URL}/search/",
+ json=test_sort,
+ headers={'Content-Type': 'application/json'})
+
+ print(f"Status Code: {response.status_code}")
+
+ if response.ok:
+ data = response.json()
+ print(f"Found {data['total']} results in {data['took_ms']}ms")
+
+ print(f"\nFirst 3 results (sorted by price):")
+ for i, hit in enumerate(data['hits'][:3]):
+ source = hit['_source']
+ print(f"\n{i+1}. {source.get('name', 'N/A')}")
+ print(f" Price: {source.get('price', 'N/A')}")
+ print(f" Score: {hit['_score']:.4f}")
+ else:
+ print(f"Error: {response.status_code}")
+ print(f"Response: {response.text}")
+
+ except Exception as e:
+ print(f"Request failed: {e}")
+
+if __name__ == "__main__":
+ test_search_with_aggregations()
+ test_search_with_filters()
+ test_search_with_sorting()
\ No newline at end of file
diff --git a/test_aggregation_functionality.py b/test_aggregation_functionality.py
new file mode 100644
index 0000000..0b12bbc
--- /dev/null
+++ b/test_aggregation_functionality.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python3
+"""
+Simple test script to verify aggregation functionality without external dependencies.
+"""
+
+import sys
+import os
+
+# Add the project root to the Python path
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+
+def test_es_query_builder_aggregations():
+ """Test the ES query builder aggregation methods."""
+ print("Testing ES Query Builder Aggregation Methods...")
+
+ # Import the query builder
+ try:
+ from search.es_query_builder import ESQueryBuilder
+ print("✓ ESQueryBuilder imported successfully")
+ except ImportError as e:
+ print(f"✗ Failed to import ESQueryBuilder: {e}")
+ return False
+
+ # Create a query builder instance
+ builder = ESQueryBuilder(
+ index_name="test_index",
+ match_fields=["name", "description"]
+ )
+
+ # Test basic aggregation
+ es_query = {"query": {"match_all": {}}}
+
+ # Test add_dynamic_aggregations
+ aggregations = {
+ "category_name": {
+ "type": "terms",
+ "field": "categoryName_keyword",
+ "size": 10
+ },
+ "price_ranges": {
+ "type": "range",
+ "field": "price",
+ "ranges": [
+ {"key": "0-50", "to": 50},
+ {"key": "50-100", "from": 50, "to": 100}
+ ]
+ }
+ }
+
+ result_query = builder.add_dynamic_aggregations(es_query, aggregations)
+
+ if "aggs" in result_query:
+ print("✓ Aggregations added to query")
+
+ # Check category aggregation
+ if "category_name" in result_query["aggs"]:
+ category_agg = result_query["aggs"]["category_name"]
+ if "terms" in category_agg and category_agg["terms"]["field"] == "categoryName_keyword":
+ print("✓ Category aggregation correctly configured")
+ else:
+ print("✗ Category aggregation incorrectly configured")
+ return False
+
+ # Check price range aggregation
+ if "price_ranges" in result_query["aggs"]:
+ price_agg = result_query["aggs"]["price_ranges"]
+ if "range" in price_agg and price_agg["range"]["field"] == "price":
+ print("✓ Price range aggregation correctly configured")
+ else:
+ print("✗ Price range aggregation incorrectly configured")
+ return False
+ else:
+ print("✗ No aggregations added to query")
+ return False
+
+ # Test sorting
+ result_query_asc = builder.add_sorting({}, "price_asc")
+ if "sort" in result_query_asc:
+ print("✓ Price ascending sort added")
+ else:
+ print("✗ Price ascending sort not added")
+ return False
+
+ result_query_desc = builder.add_sorting({}, "price_desc")
+ if "sort" in result_query_desc:
+ print("✓ Price descending sort added")
+ else:
+ print("✗ Price descending sort not added")
+ return False
+
+ result_query_time = builder.add_sorting({}, "time_desc")
+ if "sort" in result_query_time:
+ print("✓ Time descending sort added")
+ else:
+ print("✗ Time descending sort not added")
+ return False
+
+ return True
+
+
+def test_searcher_integration():
+ """Test searcher integration with new parameters."""
+ print("\nTesting Searcher Integration...")
+
+ try:
+ from search.searcher import Searcher
+ print("✓ Searcher imported successfully")
+ except ImportError as e:
+ print(f"✗ Failed to import Searcher: {e}")
+ return False
+
+ # We can't easily test the full searcher without ES, but we can check the method signature
+ import inspect
+ search_method = getattr(Searcher, 'search', None)
+
+ if search_method:
+ sig = inspect.signature(search_method)
+ params = list(sig.parameters.keys())
+
+ expected_params = ['query', 'size', 'from_', 'filters', 'min_score', 'aggregations', 'sort_by', 'context']
+ for param in expected_params:
+ if param in params:
+ print(f"✓ Parameter '{param}' found in search method")
+ else:
+ print(f"✗ Parameter '{param}' missing from search method")
+ return False
+ else:
+ print("✗ Search method not found in Searcher class")
+ return False
+
+ return True
+
+
+def test_api_route_integration():
+ """Test API route integration."""
+ print("\nTesting API Route Integration...")
+
+ try:
+ from api.routes.search import router
+ print("✓ Search router imported successfully")
+ except ImportError as e:
+ print(f"✗ Failed to import search router: {e}")
+ return False
+
+ # Check if the route exists
+ routes = [route.path for route in router.routes]
+ if "/" in routes:
+ print("✓ Main search route found")
+ else:
+ print("✗ Main search route not found")
+ return False
+
+ return True
+
+
+def test_configuration():
+ """Test configuration parsing."""
+ print("\nTesting Configuration...")
+
+ try:
+ from config import CustomerConfig
+ print("✓ CustomerConfig imported successfully")
+ except ImportError as e:
+ print(f"✗ Failed to import CustomerConfig: {e}")
+ return False
+
+ # Try to load the customer1 config
+ try:
+ config = CustomerConfig.load_from_file("config/schema/customer1_config.yaml")
+ print("✓ Customer1 configuration loaded successfully")
+
+ # Check if price field is in the configuration
+ field_names = [field.name for field in config.fields]
+ if "price" in field_names:
+ print("✓ Price field found in configuration")
+ else:
+ print("✗ Price field not found in configuration")
+ return False
+
+ # Check keyword fields for aggregations
+ if "categoryName_keyword" in field_names:
+ print("✓ Category keyword field found")
+ else:
+ print("✗ Category keyword field not found")
+ return False
+
+ if "brandName_keyword" in field_names:
+ print("✓ Brand keyword field found")
+ else:
+ print("✗ Brand keyword field not found")
+ return False
+
+ except Exception as e:
+ print(f"✗ Failed to load configuration: {e}")
+ return False
+
+ return True
+
+
+def main():
+ """Run all tests."""
+ print("=== Search Engine Aggregation Functionality Tests ===\n")
+
+ tests = [
+ test_es_query_builder_aggregations,
+ test_searcher_integration,
+ test_api_route_integration,
+ test_configuration
+ ]
+
+ passed = 0
+ total = len(tests)
+
+ for test in tests:
+ try:
+ if test():
+ passed += 1
+ print(f"✓ {test.__name__} PASSED")
+ else:
+ print(f"✗ {test.__name__} FAILED")
+ except Exception as e:
+ print(f"✗ {test.__name__} ERROR: {e}")
+
+ print(f"\n=== Test Results: {passed}/{total} tests passed ===")
+
+ if passed == total:
+ print("🎉 All tests passed! Aggregation functionality is ready.")
+ return True
+ else:
+ print("❌ Some tests failed. Please check the implementation.")
+ return False
+
+
+if __name__ == "__main__":
+ success = main()
+ sys.exit(0 if success else 1)
\ No newline at end of file
diff --git a/test_complete_search.py b/test_complete_search.py
new file mode 100644
index 0000000..ded765f
--- /dev/null
+++ b/test_complete_search.py
@@ -0,0 +1,211 @@
+#!/usr/bin/env python3
+"""
+Complete test script simulating frontend search interaction
+"""
+
+import requests
+import json
+
+API_BASE_URL = 'http://120.76.41.98:6002'
+
+def test_complete_search_workflow():
+    """Exercise the /search/ API end-to-end the way the frontend does: aggregations, filters, then sorting."""
+
+    print("=" * 60)
+    print("完整搜索流程测试")
+    print("=" * 60)
+
+    # Step 1: initial search requesting terms + range aggregations alongside hits
+    print("\n1️⃣ 初始搜索(带聚合功能)")
+    print("-" * 30)
+
+    search_request = {
+        "query": "芭比娃娃",
+        "size": 10,
+        "aggregations": {
+            "category_stats": {
+                "terms": {
+                    "field": "categoryName_keyword",
+                    "size": 10
+                }
+            },
+            "brand_stats": {
+                "terms": {
+                    "field": "brandName_keyword",
+                    "size": 10
+                }
+            },
+            "price_ranges": {
+                "range": {
+                    "field": "price",
+                    "ranges": [
+                        {"key": "0-50", "to": 50},
+                        {"key": "50-100", "from": 50, "to": 100},
+                        {"key": "100-200", "from": 100, "to": 200},
+                        {"key": "200+", "from": 200}
+                    ]
+                }
+            }
+        }
+    }
+
+    try:
+        response = requests.post(f"{API_BASE_URL}/search/", json=search_request)
+
+        if response.ok:
+            data = response.json()
+            print(f"✅ 找到 {data['total']} 个结果,耗时 {data['took_ms']}ms")
+
+            # Pretty-print any aggregation buckets the backend returned
+            if data.get('aggregations'):
+                print("\n📊 聚合结果:")
+
+                # Category buckets (top 3 only, to keep output short)
+                if 'category_stats' in data['aggregations']:
+                    print("  🏷️ 分类统计:")
+                    for bucket in data['aggregations']['category_stats']['buckets'][:3]:
+                        print(f"    - {bucket['key']}: {bucket['doc_count']} 个商品")
+
+                # Brand buckets (top 3)
+                if 'brand_stats' in data['aggregations']:
+                    print("  🏢 品牌统计:")
+                    for bucket in data['aggregations']['brand_stats']['buckets'][:3]:
+                        print(f"    - {bucket['key']}: {bucket['doc_count']} 个商品")
+
+                # Price-range buckets (all ranges requested above)
+                if 'price_ranges' in data['aggregations']:
+                    print("  💰 价格分布:")
+                    for bucket in data['aggregations']['price_ranges']['buckets']:
+                        print(f"    - {bucket['key']}: {bucket['doc_count']} 个商品")
+
+            # Show the first three hits with price/category/brand and relevance score
+            print(f"\n🔍 前3个搜索结果:")
+            for i, hit in enumerate(data['hits'][:3]):
+                source = hit['_source']
+                price = source.get('price', 'N/A')
+                category = source.get('categoryName', 'N/A')
+                brand = source.get('brandName', 'N/A')
+                print(f"  {i+1}. {source.get('name', 'N/A')}")
+                print(f"     💰 价格: {price}")
+                print(f"     📁 分类: {category}")
+                print(f"     🏷️ 品牌: {brand}")
+                print(f"     ⭐ 评分: {hit['_score']:.3f}")
+                print()
+
+        else:
+            print(f"❌ 搜索失败: {response.status_code}")
+            print(f"错误信息: {response.text}")
+
+    except Exception as e:
+        print(f"❌ 请求异常: {e}")
+
+    # Step 2: same query narrowed by brand + price-range filters
+    print("\n2️⃣ 带过滤条件的搜索")
+    print("-" * 30)
+
+    filtered_search = {
+        "query": "芭比娃娃",
+        "size": 5,
+        "filters": {
+            "brandName_keyword": ["美泰"],
+            "price_ranges": ["50-100", "100-200"]
+        }
+    }
+
+    try:
+        response = requests.post(f"{API_BASE_URL}/search/", json=filtered_search)
+
+        if response.ok:
+            data = response.json()
+            print(f"✅ 过滤后找到 {data['total']} 个结果,耗时 {data['took_ms']}ms")
+            print("  🎯 过滤条件: 品牌=美泰, 价格=¥50-200")
+
+            print(f"\n💫 前3个过滤结果:")
+            for i, hit in enumerate(data['hits'][:3]):
+                source = hit['_source']
+                price = source.get('price', 'N/A')
+                category = source.get('categoryName', 'N/A')
+                brand = source.get('brandName', 'N/A')
+                print(f"  {i+1}. {source.get('name', 'N/A')}")
+                print(f"     💰 ¥{price} | 📁 {category} | 🏷️ {brand}")
+                print(f"     ⭐ 评分: {hit['_score']:.3f}")
+
+        else:
+            print(f"❌ 过滤搜索失败: {response.status_code}")
+
+    except Exception as e:
+        print(f"❌ 请求异常: {e}")
+
+    # Step 3: sorting via separate sort_by/sort_order fields (NOTE(review): tests/integration sends combined "price_asc" — confirm which contract the API expects)
+    print("\n3️⃣ 排序搜索")
+    print("-" * 30)
+
+    # Price ascending (cheapest first)
+    price_asc_search = {
+        "query": "芭比娃娃",
+        "size": 3,
+        "sort_by": "price",
+        "sort_order": "asc"
+    }
+
+    try:
+        response = requests.post(f"{API_BASE_URL}/search/", json=price_asc_search)
+
+        if response.ok:
+            data = response.json()
+            print(f"✅ 价格升序排序,找到 {data['total']} 个结果")
+            print("  📈 排序方式: 价格从低到高")
+
+            print(f"\n💵 价格排序结果:")
+            for i, hit in enumerate(data['hits']):
+                source = hit['_source']
+                price = source.get('price', 'N/A')
+                name = source.get('name', 'N/A')
+                print(f"  {i+1}. ¥{price} - {name}")
+
+        else:
+            print(f"❌ 排序搜索失败: {response.status_code}")
+
+    except Exception as e:
+        print(f"❌ 请求异常: {e}")
+
+    # Step 4: newest-first sort on create_time
+    print("\n4️⃣ 时间排序测试")
+    print("-" * 30)
+
+    time_sort_search = {
+        "query": "芭比娃娃",
+        "size": 3,
+        "sort_by": "create_time",
+        "sort_order": "desc"
+    }
+
+    try:
+        response = requests.post(f"{API_BASE_URL}/search/", json=time_sort_search)
+
+        if response.ok:
+            data = response.json()
+            print(f"✅ 时间降序排序,找到 {data['total']} 个结果")
+            print("  📅 排序方式: 上架时间从新到旧")
+
+            print(f"\n🕐 时间排序结果:")
+            for i, hit in enumerate(data['hits']):
+                source = hit['_source']
+                create_time = source.get('create_time', 'N/A')
+                name = source.get('name', 'N/A')
+                print(f"  {i+1}. {create_time} - {name}")
+
+        else:
+            print(f"❌ 时间排序失败: {response.status_code}")
+
+    except Exception as e:
+        print(f"❌ 请求异常: {e}")
+
+    print("\n" + "=" * 60)
+    print("🎉 搜索功能测试完成!")
+    print("✨ 前端访问地址: http://localhost:8080")
+    print("🔧 后端API地址: http://120.76.41.98:6002")
+    print("=" * 60)
+
+if __name__ == "__main__":
+    test_complete_search_workflow()
\ No newline at end of file
diff --git a/test_minimal_sort.py b/test_minimal_sort.py
new file mode 100644
index 0000000..946058c
--- /dev/null
+++ b/test_minimal_sort.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python3
+"""
+Minimal test to isolate sort issue
+"""
+
+import requests
+import json
+
+def test_minimal_sort():
+    """Probe /search/ with each sort-parameter combination to isolate which one breaks the endpoint."""
+
+    base_url = "http://120.76.41.98:6002"
+
+    # Case 1: baseline — no sort parameters at all
+    print("Test 1: No sort parameters")
+    response = requests.post(f"{base_url}/search/", json={"query": "test", "size": 1})
+    print(f"Status: {response.status_code}")
+    print(f"Response: {response.text[:200]}...")
+
+    # Case 2: sort_by present but an empty string
+    print("\nTest 2: Empty sort_by")
+    response = requests.post(f"{base_url}/search/", json={"query": "test", "size": 1, "sort_by": ""})
+    print(f"Status: {response.status_code}")
+    print(f"Response: {response.text[:200]}...")
+
+    # Case 3: sort_by without sort_order (server must choose a default direction)
+    print("\nTest 3: sort_by only")
+    response = requests.post(f"{base_url}/search/", json={"query": "test", "size": 1, "sort_by": "create_time"})
+    print(f"Status: {response.status_code}")
+    print(f"Response: {response.text[:200]}...")
+
+    # Case 4: sort_order without sort_by (sort field left unspecified)
+    print("\nTest 4: sort_order only")
+    response = requests.post(f"{base_url}/search/", json={"query": "test", "size": 1, "sort_order": "desc"})
+    print(f"Status: {response.status_code}")
+    print(f"Response: {response.text[:200]}...")
+
+    # Case 5: both keys present but explicitly null (JSON null -> Python None server-side)
+    print("\nTest 5: Both parameters with null values")
+    response = requests.post(f"{base_url}/search/", json={"query": "test", "size": 1, "sort_by": None, "sort_order": None})
+    print(f"Status: {response.status_code}")
+    print(f"Response: {response.text[:200]}...")
+
+if __name__ == "__main__":
+    test_minimal_sort()
\ No newline at end of file
diff --git a/tests/integration/test_aggregation_api.py b/tests/integration/test_aggregation_api.py
new file mode 100644
index 0000000..8540533
--- /dev/null
+++ b/tests/integration/test_aggregation_api.py
@@ -0,0 +1,256 @@
+"""
+Tests for aggregation API functionality.
+"""
+
+import pytest
+from fastapi.testclient import TestClient
+from api.app import app
+
+client = TestClient(app)
+
+
+@pytest.mark.integration
+@pytest.mark.api
+def test_search_with_aggregations():
+    """POST /search/ with terms + range aggregation specs and verify the response shape."""
+    request_data = {
+        "query": "芭比娃娃",
+        "size": 10,
+        "aggregations": {
+            "category_name": {
+                "type": "terms",
+                "field": "categoryName_keyword",
+                "size": 10
+            },
+            "brand_name": {
+                "type": "terms",
+                "field": "brandName_keyword",
+                "size": 10
+            },
+            "price_ranges": {
+                "type": "range",
+                "field": "price",
+                "ranges": [
+                    {"key": "0-50", "to": 50},
+                    {"key": "50-100", "from": 50, "to": 100},
+                    {"key": "100-200", "from": 100, "to": 200},
+                    {"key": "200+", "from": 200}
+                ]
+            }
+        }
+    }
+
+    response = client.post("/search/", json=request_data)
+
+    assert response.status_code == 200
+    data = response.json()
+
+    # Basic response envelope must always be present
+    assert "hits" in data
+    assert "total" in data
+    assert "aggregations" in data
+    assert "query_info" in data
+
+    # Aggregation contents depend on index data, so only shape-check buckets that came back
+    aggregations = data["aggregations"]
+
+    # Category terms aggregation, if returned, must expose a bucket list
+    if "category_name" in aggregations:
+        assert "buckets" in aggregations["category_name"]
+        assert isinstance(aggregations["category_name"]["buckets"], list)
+
+    # Brand terms aggregation, same shape check
+    if "brand_name" in aggregations:
+        assert "buckets" in aggregations["brand_name"]
+        assert isinstance(aggregations["brand_name"]["buckets"], list)
+
+    # Price range aggregation, same shape check
+    if "price_ranges" in aggregations:
+        assert "buckets" in aggregations["price_ranges"]
+        assert isinstance(aggregations["price_ranges"]["buckets"], list)
+
+
+@pytest.mark.integration
+@pytest.mark.api
+def test_search_with_sorting():
+    """Verify price_asc / price_desc / time_desc orderings of /search/ results."""
+
+    # Price ascending. NOTE(review): this file sends combined sort_by="price_asc"; other scripts send sort_by="price" + sort_order="asc" — confirm which contract the API accepts.
+    request_data = {
+        "query": "玩具",
+        "size": 5,
+        "sort_by": "price_asc"
+    }
+
+    response = client.post("/search/", json=request_data)
+    assert response.status_code == 200
+    data = response.json()
+
+    if data["hits"] and len(data["hits"]) > 1:
+        # Collect prices from hits that actually carry one, then check ascending order
+        prices = []
+        for hit in data["hits"]:
+            if "_source" in hit and "price" in hit["_source"]:
+                prices.append(hit["_source"]["price"])
+
+        if len(prices) > 1:
+            assert prices == sorted(prices), "Results should be sorted by price ascending"
+
+    # Price descending — reuse the same request, flip the sort key
+    request_data["sort_by"] = "price_desc"
+    response = client.post("/search/", json=request_data)
+    assert response.status_code == 200
+    data = response.json()
+
+    if data["hits"] and len(data["hits"]) > 1:
+        prices = []
+        for hit in data["hits"]:
+            if "_source" in hit and "price" in hit["_source"]:
+                prices.append(hit["_source"]["price"])
+
+        if len(prices) > 1:
+            assert prices == sorted(prices, reverse=True), "Results should be sorted by price descending"
+
+    # create_time descending (newest first)
+    request_data["sort_by"] = "time_desc"
+    response = client.post("/search/", json=request_data)
+    assert response.status_code == 200
+    data = response.json()
+
+    if data["hits"] and len(data["hits"]) > 1:
+        times = []
+        for hit in data["hits"]:
+            if "_source" in hit and "create_time" in hit["_source"]:
+                times.append(hit["_source"]["create_time"])
+
+        if len(times) > 1:
+            # Newer items (later timestamps) must come first
+            assert times == sorted(times, reverse=True), "Results should be sorted by time descending"
+
+
+@pytest.mark.integration
+@pytest.mark.api
+def test_search_with_filters_and_aggregations():
+    """Combine a category filter with a brand aggregation in a single /search/ request."""
+    request_data = {
+        "query": "玩具",
+        "size": 10,
+        "filters": {
+            "category_name": ["芭比"]
+        },
+        "aggregations": {
+            "brand_name": {
+                "type": "terms",
+                "field": "brandName_keyword",
+                "size": 10
+            }
+        }
+    }
+
+    response = client.post("/search/", json=request_data)
+    assert response.status_code == 200
+    data = response.json()
+
+    # Every hit should match the category filter. NOTE(review): filter key "category_name" differs from the "categoryName_keyword" field used elsewhere — confirm the filter-key contract.
+    assert "hits" in data
+    for hit in data["hits"]:
+        if "_source" in hit and "categoryName" in hit["_source"]:
+            assert "芭比" in hit["_source"]["categoryName"]
+
+    # Aggregations must still be computed on the filtered result set
+    assert "aggregations" in data
+
+
+@pytest.mark.integration
+@pytest.mark.api
+def test_search_without_aggregations():
+    """Search with no aggregation specs — the default request shape."""
+    request_data = {
+        "query": "玩具",
+        "size": 10
+    }
+
+    response = client.post("/search/", json=request_data)
+    assert response.status_code == 200
+    data = response.json()
+
+    # Basic response envelope is unchanged
+    assert "hits" in data
+    assert "total" in data
+    assert "query_info" in data
+
+    # The aggregations key is still present (possibly empty) even when none were requested
+    assert "aggregations" in data
+
+
+@pytest.mark.integration
+@pytest.mark.api
+def test_aggregation_edge_cases():
+    """Degenerate aggregation inputs: empty query string and an unknown aggregation type."""
+
+    # Empty query string
+    request_data = {
+        "query": "",
+        "size": 10,
+        "aggregations": {
+            "category_name": {
+                "type": "terms",
+                "field": "categoryName_keyword",
+                "size": 10
+            }
+        }
+    }
+
+    response = client.post("/search/", json=request_data)
+    # Either succeed (match-all style) or reject with a validation error — never a 500
+    assert response.status_code in [200, 422]
+
+    # Unknown aggregation type
+    request_data = {
+        "query": "玩具",
+        "size": 10,
+        "aggregations": {
+            "invalid_agg": {
+                "type": "invalid_type",
+                "field": "categoryName_keyword",
+                "size": 10
+            }
+        }
+    }
+
+    response = client.post("/search/", json=request_data)
+    # Again: graceful success or a validation error, never a server crash
+    assert response.status_code in [200, 422]
+
+
+@pytest.mark.unit
+def test_aggregation_spec_validation():
+    """Construct AggregationSpec models directly and check field round-tripping."""
+    from api.models import AggregationSpec
+
+    # Terms aggregation spec
+    agg_spec = AggregationSpec(
+        field="categoryName_keyword",
+        type="terms",
+        size=10
+    )
+    assert agg_spec.field == "categoryName_keyword"
+    assert agg_spec.type == "terms"
+    assert agg_spec.size == 10
+
+    # Range aggregation spec with two buckets
+    range_agg = AggregationSpec(
+        field="price",
+        type="range",
+        ranges=[
+            {"key": "0-50", "to": 50},
+            {"key": "50-100", "from": 50, "to": 100}
+        ]
+    )
+    assert range_agg.field == "price"
+    assert range_agg.type == "range"
+    assert len(range_agg.ranges) == 2
+
+
+if __name__ == "__main__":
+    pytest.main([__file__])
\ No newline at end of file
--
libgit2 0.21.2