Commit c86c8237c01f306edbc5fad41dc397ddd47c9012
1 parent 167a0cb2
Support aggregation. Added logic for the filter items, but there are issues.
Showing 16 changed files with 1976 additions and 35 deletions
api/models.py
| ... | ... | @@ -13,6 +13,10 @@ class SearchRequest(BaseModel): |
| 13 | 13 | from_: int = Field(0, ge=0, alias="from", description="Offset for pagination") |
| 14 | 14 | filters: Optional[Dict[str, Any]] = Field(None, description="Additional filters") |
| 15 | 15 | min_score: Optional[float] = Field(None, description="Minimum score threshold") |
| 16 | + # New fields | |
| 17 | + aggregations: Optional[Dict[str, Any]] = Field(None, description="Aggregation specifications") | |
| 18 | + sort_by: Optional[str] = Field(None, description="Sort field name") | |
| 19 | + sort_order: Optional[str] = Field("desc", description="Sort order: 'asc' or 'desc'") | |
| 16 | 20 | |
| 17 | 21 | |
| 18 | 22 | class ImageSearchRequest(BaseModel): | ... | ... |
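For reference, a minimal request body exercising the new `SearchRequest` fields might look like the sketch below; the query text and aggregation name are illustrative, not part of this commit.

```python
# Illustrative sketch only: a payload using the fields added above
# (aggregations, sort_by, sort_order); values are made up for demonstration.
example_request = {
    "query": "芭比",
    "size": 10,
    "sort_by": "price",
    "sort_order": "asc",
    "aggregations": {
        "brand_stats": {"terms": {"field": "brandName_keyword", "size": 10}}
    },
}
```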
api/routes/search.py
| ... | ... | @@ -68,7 +68,10 @@ async def search(request: SearchRequest, http_request: Request): |
| 68 | 68 | from_=request.from_, |
| 69 | 69 | filters=request.filters, |
| 70 | 70 | min_score=request.min_score, |
| 71 | - context=context | |
| 71 | + context=context, | |
| 72 | + aggregations=request.aggregations, | |
| 73 | + sort_by=request.sort_by, | |
| 74 | + sort_order=request.sort_order | |
| 72 | 75 | ) |
| 73 | 76 | |
| 74 | 77 | # Include performance summary in response | ... | ... |
config/schema/customer1_config.yaml
| ... | ... | @@ -115,6 +115,21 @@ fields: |
| 115 | 115 | index: true |
| 116 | 116 | store: false |
| 117 | 117 | |
| 118 | + - name: "supplierName_keyword" | |
| 119 | + type: "KEYWORD" | |
| 120 | + source_table: "extension" | |
| 121 | + source_column: "supplierName" | |
| 122 | + index: true | |
| 123 | + store: false | |
| 124 | + | |
| 125 | + # Price Fields | |
| 126 | + - name: "price" | |
| 127 | + type: "DOUBLE" | |
| 128 | + source_table: "extension" | |
| 129 | + source_column: "price" | |
| 130 | + index: true | |
| 131 | + store: true | |
| 132 | + | |
| 118 | 133 | # Text Embedding Fields |
| 119 | 134 | - name: "name_embedding" |
| 120 | 135 | type: "TEXT_EMBEDDING" | ... | ... |
| ... | ... | @@ -0,0 +1,89 @@ |
| 1 | +#!/usr/bin/env python3 | |
| 2 | +""" | |
| 3 | +Debug script to check the generated ES query for sorting | |
| 4 | +""" | |
| 5 | + | |
| 6 | +import requests | |
| 7 | +import json | |
| 8 | + | |
| 9 | +def test_simple_sort(): | |
| 10 | + """Test simple sort functionality via API""" | |
| 11 | + | |
| 12 | + print("Testing simple sort via API...") | |
| 13 | + | |
| 14 | + # Test without sort first | |
| 15 | + request_no_sort = { | |
| 16 | + "query": "芭比", | |
| 17 | + "size": 1 | |
| 18 | + } | |
| 19 | + | |
| 20 | + print("\n1. Search without sort:") | |
| 21 | + try: | |
| 22 | + response = requests.post("http://120.76.41.98:6002/search/", json=request_no_sort) | |
| 23 | + print(f"Status: {response.status_code}") | |
| 24 | + if response.ok: | |
| 25 | + data = response.json() | |
| 26 | + print(f"Results: {data.get('total', 0)} found in {data.get('took_ms', 0)}ms") | |
| 27 | + else: | |
| 28 | + print(f"Error: {response.text}") | |
| 29 | + except Exception as e: | |
| 30 | + print(f"Exception: {e}") | |
| 31 | + | |
| 32 | + # Test with sort | |
| 33 | + request_with_sort = { | |
| 34 | + "query": "芭比", | |
| 35 | + "size": 1, | |
| 36 | + "sort_by": "create_time", | |
| 37 | + "sort_order": "desc" | |
| 38 | + } | |
| 39 | + | |
| 40 | + print("\n2. Search with sort:") | |
| 41 | + print(f"Request: {json.dumps(request_with_sort, ensure_ascii=False, indent=2)}") | |
| 42 | + | |
| 43 | + try: | |
| 44 | + response = requests.post("http://120.76.41.98:6002/search/", json=request_with_sort) | |
| 45 | + print(f"Status: {response.status_code}") | |
| 46 | + if response.ok: | |
| 47 | + data = response.json() | |
| 48 | + print(f"Results: {data.get('total', 0)} found in {data.get('took_ms', 0)}ms") | |
| 49 | + | |
| 50 | + if 'hits' in data and data['hits']: | |
| 51 | + hit = data['hits'][0] | |
| 52 | + source = hit.get('_source', {}) | |
| 53 | + print(f"Sample result: {source.get('name', 'N/A')}") | |
| 54 | + print(f"Create time: {source.get('create_time', 'N/A')}") | |
| 55 | + else: | |
| 56 | + print(f"Error: {response.text}") | |
| 57 | + except Exception as e: | |
| 58 | + print(f"Exception: {e}") | |
| 59 | + | |
| 60 | + # Test with different sort field | |
| 61 | + request_price_sort = { | |
| 62 | + "query": "芭比", | |
| 63 | + "size": 1, | |
| 64 | + "sort_by": "price", | |
| 65 | + "sort_order": "asc" | |
| 66 | + } | |
| 67 | + | |
| 68 | + print("\n3. Search with price sort:") | |
| 69 | + print(f"Request: {json.dumps(request_price_sort, ensure_ascii=False, indent=2)}") | |
| 70 | + | |
| 71 | + try: | |
| 72 | + response = requests.post("http://120.76.41.98:6002/search/", json=request_price_sort) | |
| 73 | + print(f"Status: {response.status_code}") | |
| 74 | + if response.ok: | |
| 75 | + data = response.json() | |
| 76 | + print(f"Results: {data.get('total', 0)} found in {data.get('took_ms', 0)}ms") | |
| 77 | + | |
| 78 | + if 'hits' in data and data['hits']: | |
| 79 | + hit = data['hits'][0] | |
| 80 | + source = hit.get('_source', {}) | |
| 81 | + print(f"Sample result: {source.get('name', 'N/A')}") | |
| 82 | + print(f"Price: {source.get('price', 'N/A')}") | |
| 83 | + else: | |
| 84 | + print(f"Error: {response.text}") | |
| 85 | + except Exception as e: | |
| 86 | + print(f"Exception: {e}") | |
| 87 | + | |
| 88 | +if __name__ == "__main__": | |
| 89 | + test_simple_sort() | |
| 0 | 90 | \ No newline at end of file | ... | ... |
frontend/index.html
| ... | ... | @@ -21,14 +21,18 @@ |
| 21 | 21 | </div> |
| 22 | 22 | |
| 23 | 23 | <div class="search-options"> |
| 24 | - <label><input type="checkbox" id="enableTranslation" checked> 启用翻译</label> | |
| 25 | - <label><input type="checkbox" id="enableEmbedding" checked> 启用语义搜索</label> | |
| 26 | - <label><input type="checkbox" id="enableRerank" checked> 启用自定义排序</label> | |
| 27 | 24 | <select id="resultSize"> |
| 28 | 25 | <option value="10">10条结果</option> |
| 29 | 26 | <option value="20">20条结果</option> |
| 30 | 27 | <option value="50">50条结果</option> |
| 31 | 28 | </select> |
| 29 | + <select id="sortBy"> | |
| 30 | + <option value="">默认排序</option> | |
| 31 | + <option value="create_time:desc">上架时间(新到旧)</option> | |
| 32 | + <option value="create_time:asc">上架时间(旧到新)</option> | |
| 33 | + <option value="price:asc">价格(低到高)</option> | |
| 34 | + <option value="price:desc">价格(高到低)</option> | |
| 35 | + </select> | |
| 32 | 36 | </div> |
| 33 | 37 | |
| 34 | 38 | <div class="search-examples"> |
| ... | ... | @@ -45,9 +49,19 @@ |
| 45 | 49 | <p>搜索中...</p> |
| 46 | 50 | </div> |
| 47 | 51 | |
| 48 | - <div id="results" class="results-section"></div> | |
| 52 | + <div class="content-wrapper"> | |
| 53 | + <div id="aggregationPanel" class="aggregation-panel" style="display: none;"> | |
| 54 | + <h3>筛选条件</h3> | |
| 55 | + <div id="activeFilters" class="active-filters"></div> | |
| 56 | + <div id="aggregationResults" class="aggregation-results"></div> | |
| 57 | + </div> | |
| 58 | + | |
| 59 | + <div class="main-content"> | |
| 60 | + <div id="results" class="results-section"></div> | |
| 49 | 61 | |
| 50 | - <div id="queryInfo" class="query-info"></div> | |
| 62 | + <div id="queryInfo" class="query-info"></div> | |
| 63 | + </div> | |
| 64 | + </div> | |
| 51 | 65 | </div> |
| 52 | 66 | |
| 53 | 67 | <footer> | ... | ... |
frontend/static/css/style.css
| ... | ... | @@ -280,3 +280,153 @@ footer { |
| 280 | 280 | font-size: 24px; |
| 281 | 281 | margin-bottom: 10px; |
| 282 | 282 | } |
| 283 | + | |
| 284 | +/* Layout for aggregation and main content */ | |
| 285 | +.content-wrapper { | |
| 286 | + display: flex; | |
| 287 | + gap: 20px; | |
| 288 | + align-items: flex-start; | |
| 289 | +} | |
| 290 | + | |
| 291 | +/* Aggregation Panel */ | |
| 292 | +.aggregation-panel { | |
| 293 | + background: white; | |
| 294 | + border-radius: 15px; | |
| 295 | + padding: 20px; | |
| 296 | + box-shadow: 0 10px 30px rgba(0,0,0,0.1); | |
| 297 | + width: 300px; | |
| 298 | + flex-shrink: 0; | |
| 299 | +} | |
| 300 | + | |
| 301 | +.aggregation-panel h3 { | |
| 302 | + color: #333; | |
| 303 | + margin-bottom: 20px; | |
| 304 | + font-size: 1.3em; | |
| 305 | + border-bottom: 2px solid #667eea; | |
| 306 | + padding-bottom: 10px; | |
| 307 | +} | |
| 308 | + | |
| 309 | +/* Active Filters */ | |
| 310 | +.active-filters-list { | |
| 311 | + margin-bottom: 20px; | |
| 312 | + display: flex; | |
| 313 | + flex-wrap: wrap; | |
| 314 | + gap: 8px; | |
| 315 | + align-items: center; | |
| 316 | +} | |
| 317 | + | |
| 318 | +.active-filter-tag { | |
| 319 | + background: #667eea; | |
| 320 | + color: white; | |
| 321 | + padding: 4px 8px; | |
| 322 | + border-radius: 15px; | |
| 323 | + font-size: 12px; | |
| 324 | + display: flex; | |
| 325 | + align-items: center; | |
| 326 | + gap: 5px; | |
| 327 | +} | |
| 328 | + | |
| 329 | +.remove-filter { | |
| 330 | + background: none; | |
| 331 | + border: none; | |
| 332 | + color: white; | |
| 333 | + cursor: pointer; | |
| 334 | + font-size: 14px; | |
| 335 | + font-weight: bold; | |
| 336 | + padding: 0; | |
| 337 | + width: 16px; | |
| 338 | + height: 16px; | |
| 339 | + border-radius: 50%; | |
| 340 | + display: flex; | |
| 341 | + align-items: center; | |
| 342 | + justify-content: center; | |
| 343 | +} | |
| 344 | + | |
| 345 | +.remove-filter:hover { | |
| 346 | + background: rgba(255,255,255,0.2); | |
| 347 | +} | |
| 348 | + | |
| 349 | +.clear-filters { | |
| 350 | + background: #ff4444; | |
| 351 | + color: white; | |
| 352 | + border: none; | |
| 353 | + padding: 4px 12px; | |
| 354 | + border-radius: 15px; | |
| 355 | + font-size: 12px; | |
| 356 | + cursor: pointer; | |
| 357 | + transition: background 0.3s; | |
| 358 | +} | |
| 359 | + | |
| 360 | +.clear-filters:hover { | |
| 361 | + background: #cc0000; | |
| 362 | +} | |
| 363 | + | |
| 364 | +/* Aggregation Groups */ | |
| 365 | +.aggregation-group { | |
| 366 | + margin-bottom: 25px; | |
| 367 | +} | |
| 368 | + | |
| 369 | +.aggregation-group h4 { | |
| 370 | + color: #555; | |
| 371 | + margin-bottom: 10px; | |
| 372 | + font-size: 1.1em; | |
| 373 | + font-weight: 600; | |
| 374 | +} | |
| 375 | + | |
| 376 | +.aggregation-items { | |
| 377 | + display: flex; | |
| 378 | + flex-direction: column; | |
| 379 | + gap: 8px; | |
| 380 | +} | |
| 381 | + | |
| 382 | +.aggregation-item { | |
| 383 | + display: flex; | |
| 384 | + align-items: center; | |
| 385 | + gap: 8px; | |
| 386 | + cursor: pointer; | |
| 387 | + padding: 5px; | |
| 388 | + border-radius: 5px; | |
| 389 | + transition: background-color 0.2s; | |
| 390 | +} | |
| 391 | + | |
| 392 | +.aggregation-item:hover { | |
| 393 | + background-color: #f5f5f5; | |
| 394 | +} | |
| 395 | + | |
| 396 | +.aggregation-item input[type="checkbox"] { | |
| 397 | + margin: 0; | |
| 398 | +} | |
| 399 | + | |
| 400 | +.aggregation-item span { | |
| 401 | + flex: 1; | |
| 402 | + font-size: 14px; | |
| 403 | + color: #333; | |
| 404 | +} | |
| 405 | + | |
| 406 | +.aggregation-item .count { | |
| 407 | + color: #888; | |
| 408 | + font-size: 12px; | |
| 409 | + font-weight: normal; | |
| 410 | +} | |
| 411 | + | |
| 412 | +/* Main content area */ | |
| 413 | +.main-content { | |
| 414 | + flex: 1; | |
| 415 | + min-width: 0; /* Allow content to shrink */ | |
| 416 | +} | |
| 417 | + | |
| 418 | +/* Responsive design */ | |
| 419 | +@media (max-width: 768px) { | |
| 420 | + .content-wrapper { | |
| 421 | + flex-direction: column; | |
| 422 | + } | |
| 423 | + | |
| 424 | + .aggregation-panel { | |
| 425 | + width: 100%; | |
| 426 | + order: 2; /* Show below results on mobile */ | |
| 427 | + } | |
| 428 | + | |
| 429 | + .main-content { | |
| 430 | + order: 1; | |
| 431 | + } | |
| 432 | +} | ... | ... |
frontend/static/js/app.js
| ... | ... | @@ -19,6 +19,9 @@ function setQuery(query) { |
| 19 | 19 | performSearch(); |
| 20 | 20 | } |
| 21 | 21 | |
| 22 | +// Global variable to store the current filter conditions | |
| 23 | +let currentFilters = {}; | |
| 24 | + | |
| 22 | 25 | // Perform search |
| 23 | 26 | async function performSearch() { |
| 24 | 27 | const query = document.getElementById('searchInput').value.trim(); |
| ... | ... | @@ -28,16 +31,57 @@ async function performSearch() { |
| 28 | 31 | return; |
| 29 | 32 | } |
| 30 | 33 | |
| 31 | - // Get options (temporarily disable translation and embedding due to GPU issues) | |
| 34 | + // Get options | |
| 32 | 35 | const size = parseInt(document.getElementById('resultSize').value); |
| 33 | - const enableTranslation = false; // Disabled temporarily | |
| 34 | - const enableEmbedding = false; // Disabled temporarily | |
| 35 | - const enableRerank = document.getElementById('enableRerank').checked; | |
| 36 | + const sortByValue = document.getElementById('sortBy').value; | |
| 37 | + | |
| 38 | + // Parse sort option | |
| 39 | + let sort_by = null; | |
| 40 | + let sort_order = 'desc'; | |
| 41 | + if (sortByValue) { | |
| 42 | + const [field, order] = sortByValue.split(':'); | |
| 43 | + sort_by = field; | |
| 44 | + sort_order = order; | |
| 45 | + } | |
| 46 | + | |
| 47 | + // Define aggregations for faceted search | |
| 48 | + const aggregations = { | |
| 49 | + "category_stats": { | |
| 50 | + "terms": { | |
| 51 | + "field": "categoryName_keyword", | |
| 52 | + "size": 10 | |
| 53 | + } | |
| 54 | + }, | |
| 55 | + "brand_stats": { | |
| 56 | + "terms": { | |
| 57 | + "field": "brandName_keyword", | |
| 58 | + "size": 10 | |
| 59 | + } | |
| 60 | + }, | |
| 61 | + "supplier_stats": { | |
| 62 | + "terms": { | |
| 63 | + "field": "supplierName_keyword", | |
| 64 | + "size": 10 | |
| 65 | + } | |
| 66 | + }, | |
| 67 | + "price_ranges": { | |
| 68 | + "range": { | |
| 69 | + "field": "price", | |
| 70 | + "ranges": [ | |
| 71 | + {"key": "0-50", "to": 50}, | |
| 72 | + {"key": "50-100", "from": 50, "to": 100}, | |
| 73 | + {"key": "100-200", "from": 100, "to": 200}, | |
| 74 | + {"key": "200+", "from": 200} | |
| 75 | + ] | |
| 76 | + } | |
| 77 | + } | |
| 78 | + }; | |
| 36 | 79 | |
| 37 | 80 | // Show loading |
| 38 | 81 | document.getElementById('loading').style.display = 'block'; |
| 39 | 82 | document.getElementById('results').innerHTML = ''; |
| 40 | 83 | document.getElementById('queryInfo').innerHTML = ''; |
| 84 | + document.getElementById('aggregationResults').innerHTML = ''; | |
| 41 | 85 | |
| 42 | 86 | try { |
| 43 | 87 | const response = await fetch(`${API_BASE_URL}/search/`, { |
| ... | ... | @@ -48,9 +92,10 @@ async function performSearch() { |
| 48 | 92 | body: JSON.stringify({ |
| 49 | 93 | query: query, |
| 50 | 94 | size: size, |
| 51 | - enable_translation: enableTranslation, | |
| 52 | - enable_embedding: enableEmbedding, | |
| 53 | - enable_rerank: enableRerank | |
| 95 | + filters: Object.keys(currentFilters).length > 0 ? currentFilters : null, | |
| 96 | + aggregations: aggregations, | |
| 97 | + sort_by: sort_by, | |
| 98 | + sort_order: sort_order | |
| 54 | 99 | }) |
| 55 | 100 | }); |
| 56 | 101 | |
| ... | ... | @@ -61,6 +106,8 @@ async function performSearch() { |
| 61 | 106 | const data = await response.json(); |
| 62 | 107 | displayResults(data); |
| 63 | 108 | displayQueryInfo(data.query_info); |
| 109 | + displayAggregations(data.aggregations); | |
| 110 | + displayActiveFilters(); | |
| 64 | 111 | |
| 65 | 112 | } catch (error) { |
| 66 | 113 | console.error('Search error:', error); |
| ... | ... | @@ -119,6 +166,7 @@ function displayResults(data) { |
| 119 | 166 | </div> |
| 120 | 167 | |
| 121 | 168 | <div class="result-meta"> |
| 169 | + ${source.price ? `<span>💰 ¥${escapeHtml(source.price)}</span>` : ''} | |
| 122 | 170 | ${source.categoryName ? `<span>📁 ${escapeHtml(source.categoryName)}</span>` : ''} |
| 123 | 171 | ${source.brandName ? `<span>🏷️ ${escapeHtml(source.brandName)}</span>` : ''} |
| 124 | 172 | ${source.supplierName ? `<span>🏭 ${escapeHtml(source.supplierName)}</span>` : ''} |
| ... | ... | @@ -227,6 +275,236 @@ function getLanguageName(code) { |
| 227 | 275 | return names[code] || code; |
| 228 | 276 | } |
| 229 | 277 | |
| 278 | +// Display aggregations | |
| 279 | +function displayAggregations(aggregations) { | |
| 280 | + if (!aggregations || Object.keys(aggregations).length === 0) { | |
| 281 | + document.getElementById('aggregationPanel').style.display = 'none'; | |
| 282 | + return; | |
| 283 | + } | |
| 284 | + | |
| 285 | + document.getElementById('aggregationPanel').style.display = 'block'; | |
| 286 | + const aggregationResultsDiv = document.getElementById('aggregationResults'); | |
| 287 | + | |
| 288 | + let html = ''; | |
| 289 | + | |
| 290 | + // Category aggregation | |
| 291 | + if (aggregations.category_stats && aggregations.category_stats.buckets) { | |
| 292 | + html += ` | |
| 293 | + <div class="aggregation-group"> | |
| 294 | + <h4>商品分类</h4> | |
| 295 | + <div class="aggregation-items"> | |
| 296 | + `; | |
| 297 | + | |
| 298 | + aggregations.category_stats.buckets.forEach(bucket => { | |
| 299 | + const key = bucket.key; | |
| 300 | + const count = bucket.doc_count; | |
| 301 | + const isChecked = currentFilters.categoryName_keyword && currentFilters.categoryName_keyword.includes(key); | |
| 302 | + | |
| 303 | + html += ` | |
| 304 | + <label class="aggregation-item"> | |
| 305 | + <input type="checkbox" | |
| 306 | + ${isChecked ? 'checked' : ''} | |
| 307 | + onchange="toggleFilter('categoryName_keyword', '${escapeHtml(key)}', this.checked)"> | |
| 308 | + <span>${escapeHtml(key)}</span> | |
| 309 | + <span class="count">(${count})</span> | |
| 310 | + </label> | |
| 311 | + `; | |
| 312 | + }); | |
| 313 | + | |
| 314 | + html += '</div></div>'; | |
| 315 | + } | |
| 316 | + | |
| 317 | + // Brand aggregation | |
| 318 | + if (aggregations.brand_stats && aggregations.brand_stats.buckets) { | |
| 319 | + html += ` | |
| 320 | + <div class="aggregation-group"> | |
| 321 | + <h4>品牌</h4> | |
| 322 | + <div class="aggregation-items"> | |
| 323 | + `; | |
| 324 | + | |
| 325 | + aggregations.brand_stats.buckets.forEach(bucket => { | |
| 326 | + const key = bucket.key; | |
| 327 | + const count = bucket.doc_count; | |
| 328 | + const isChecked = currentFilters.brandName_keyword && currentFilters.brandName_keyword.includes(key); | |
| 329 | + | |
| 330 | + html += ` | |
| 331 | + <label class="aggregation-item"> | |
| 332 | + <input type="checkbox" | |
| 333 | + ${isChecked ? 'checked' : ''} | |
| 334 | + onchange="toggleFilter('brandName_keyword', '${escapeHtml(key)}', this.checked)"> | |
| 335 | + <span>${escapeHtml(key)}</span> | |
| 336 | + <span class="count">(${count})</span> | |
| 337 | + </label> | |
| 338 | + `; | |
| 339 | + }); | |
| 340 | + | |
| 341 | + html += '</div></div>'; | |
| 342 | + } | |
| 343 | + | |
| 344 | + // Supplier aggregation | |
| 345 | + if (aggregations.supplier_stats && aggregations.supplier_stats.buckets) { | |
| 346 | + html += ` | |
| 347 | + <div class="aggregation-group"> | |
| 348 | + <h4>供应商</h4> | |
| 349 | + <div class="aggregation-items"> | |
| 350 | + `; | |
| 351 | + | |
| 352 | + aggregations.supplier_stats.buckets.slice(0, 5).forEach(bucket => { | |
| 353 | + const key = bucket.key; | |
| 354 | + const count = bucket.doc_count; | |
| 355 | + const isChecked = currentFilters.supplierName_keyword && currentFilters.supplierName_keyword.includes(key); | |
| 356 | + | |
| 357 | + html += ` | |
| 358 | + <label class="aggregation-item"> | |
| 359 | + <input type="checkbox" | |
| 360 | + ${isChecked ? 'checked' : ''} | |
| 361 | + onchange="toggleFilter('supplierName_keyword', '${escapeHtml(key)}', this.checked)"> | |
| 362 | + <span>${escapeHtml(key)}</span> | |
| 363 | + <span class="count">(${count})</span> | |
| 364 | + </label> | |
| 365 | + `; | |
| 366 | + }); | |
| 367 | + | |
| 368 | + html += '</div></div>'; | |
| 369 | + } | |
| 370 | + | |
| 371 | + // Price range aggregation | |
| 372 | + if (aggregations.price_ranges && aggregations.price_ranges.buckets) { | |
| 373 | + html += ` | |
| 374 | + <div class="aggregation-group"> | |
| 375 | + <h4>价格区间</h4> | |
| 376 | + <div class="aggregation-items"> | |
| 377 | + `; | |
| 378 | + | |
| 379 | + aggregations.price_ranges.buckets.forEach(bucket => { | |
| 380 | + const key = bucket.key; | |
| 381 | + const count = bucket.doc_count; | |
| 382 | + const isChecked = currentFilters.price_ranges && currentFilters.price_ranges.includes(key); | |
| 383 | + | |
| 384 | + const priceLabel = { | |
| 385 | + '0-50': '¥0-50', | |
| 386 | + '50-100': '¥50-100', | |
| 387 | + '100-200': '¥100-200', | |
| 388 | + '200+': '¥200+' | |
| 389 | + }; | |
| 390 | + | |
| 391 | + html += ` | |
| 392 | + <label class="aggregation-item"> | |
| 393 | + <input type="checkbox" | |
| 394 | + ${isChecked ? 'checked' : ''} | |
| 395 | + onchange="togglePriceFilter('${escapeHtml(key)}', this.checked)"> | |
| 396 | + <span>${priceLabel[key] || key}</span> | |
| 397 | + <span class="count">(${count})</span> | |
| 398 | + </label> | |
| 399 | + `; | |
| 400 | + }); | |
| 401 | + | |
| 402 | + html += '</div></div>'; | |
| 403 | + } | |
| 404 | + | |
| 405 | + aggregationResultsDiv.innerHTML = html; | |
| 406 | +} | |
| 407 | + | |
| 408 | +// Display active filters | |
| 409 | +function displayActiveFilters() { | |
| 410 | + const activeFiltersDiv = document.getElementById('activeFilters'); | |
| 411 | + | |
| 412 | + if (Object.keys(currentFilters).length === 0) { | |
| 413 | + activeFiltersDiv.innerHTML = ''; | |
| 414 | + return; | |
| 415 | + } | |
| 416 | + | |
| 417 | + let html = '<div class="active-filters-list">'; | |
| 418 | + | |
| 419 | + Object.entries(currentFilters).forEach(([field, values]) => { | |
| 420 | + if (Array.isArray(values)) { | |
| 421 | + values.forEach(value => { | |
| 422 | + let displayValue = value; | |
| 423 | + if (field === 'price_ranges') { | |
| 424 | + const priceLabel = { | |
| 425 | + '0-50': '¥0-50', | |
| 426 | + '50-100': '¥50-100', | |
| 427 | + '100-200': '¥100-200', | |
| 428 | + '200+': '¥200+' | |
| 429 | + }; | |
| 430 | + displayValue = priceLabel[value] || value; | |
| 431 | + } | |
| 432 | + | |
| 433 | + html += ` | |
| 434 | + <span class="active-filter-tag"> | |
| 435 | + ${escapeHtml(displayValue)} | |
| 436 | + <button onclick="removeFilter('${field}', '${escapeHtml(value)}')" class="remove-filter">×</button> | |
| 437 | + </span> | |
| 438 | + `; | |
| 439 | + }); | |
| 440 | + } | |
| 441 | + }); | |
| 442 | + | |
| 443 | + html += `<button onclick="clearAllFilters()" class="clear-filters">清除所有</button></div>`; | |
| 444 | + activeFiltersDiv.innerHTML = html; | |
| 445 | +} | |
| 446 | + | |
| 447 | +// Toggle filter | |
| 448 | +function toggleFilter(field, value, checked) { | |
| 449 | + if (checked) { | |
| 450 | + if (!currentFilters[field]) { | |
| 451 | + currentFilters[field] = []; | |
| 452 | + } | |
| 453 | + if (!currentFilters[field].includes(value)) { | |
| 454 | + currentFilters[field].push(value); | |
| 455 | + } | |
| 456 | + } else { | |
| 457 | + if (currentFilters[field]) { | |
| 458 | + const index = currentFilters[field].indexOf(value); | |
| 459 | + if (index > -1) { | |
| 460 | + currentFilters[field].splice(index, 1); | |
| 461 | + } | |
| 462 | + if (currentFilters[field].length === 0) { | |
| 463 | + delete currentFilters[field]; | |
| 464 | + } | |
| 465 | + } | |
| 466 | + } | |
| 467 | + | |
| 468 | + // Re-run search with new filters | |
| 469 | + performSearch(); | |
| 470 | +} | |
| 471 | + | |
| 472 | +// Toggle price filter | |
| 473 | +function togglePriceFilter(value, checked) { | |
| 474 | + if (checked) { | |
| 475 | + if (!currentFilters.price_ranges) { | |
| 476 | + currentFilters.price_ranges = []; | |
| 477 | + } | |
| 478 | + if (!currentFilters.price_ranges.includes(value)) { | |
| 479 | + currentFilters.price_ranges.push(value); | |
| 480 | + } | |
| 481 | + } else { | |
| 482 | + if (currentFilters.price_ranges) { | |
| 483 | + const index = currentFilters.price_ranges.indexOf(value); | |
| 484 | + if (index > -1) { | |
| 485 | + currentFilters.price_ranges.splice(index, 1); | |
| 486 | + } | |
| 487 | + if (currentFilters.price_ranges.length === 0) { | |
| 488 | + delete currentFilters.price_ranges; | |
| 489 | + } | |
| 490 | + } | |
| 491 | + } | |
| 492 | + | |
| 493 | + // Re-run search with new filters | |
| 494 | + performSearch(); | |
| 495 | +} | |
| 496 | + | |
| 497 | +// Remove single filter | |
| 498 | +function removeFilter(field, value) { | |
| 499 | + toggleFilter(field, value, false); | |
| 500 | +} | |
| 501 | + | |
| 502 | +// Clear all filters | |
| 503 | +function clearAllFilters() { | |
| 504 | + currentFilters = {}; | |
| 505 | + performSearch(); | |
| 506 | +} | |
| 507 | + | |
| 230 | 508 | // Initialize page |
| 231 | 509 | document.addEventListener('DOMContentLoaded', function() { |
| 232 | 510 | console.log('SearchEngine Frontend loaded'); | ... | ... |
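The `currentFilters` object built by `toggleFilter`/`togglePriceFilter` is posted as-is in the `filters` field; a hedged sketch of its expected shape follows, written as a Python dict for comparison with the backend handling in `search/es_query_builder.py` (values illustrative).

```python
# Illustrative sketch: shape of the filters object the frontend accumulates and posts.
# Keyword fields map to lists of selected bucket keys; price_ranges uses the range keys.
current_filters = {
    "categoryName_keyword": ["芭比"],
    "brandName_keyword": ["美泰"],
    "price_ranges": ["0-50", "50-100"],
}
```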
search/es_query_builder.py
| ... | ... | @@ -202,7 +202,36 @@ class ESQueryBuilder: |
| 202 | 202 | filter_clauses = [] |
| 203 | 203 | |
| 204 | 204 | for field, value in filters.items(): |
| 205 | - if isinstance(value, dict): | |
| 205 | + if field == 'price_ranges': | |
| 206 | + # Handle price range filters | |
| 207 | + if isinstance(value, list): | |
| 208 | + price_ranges = [] | |
| 209 | + for price_range in value: | |
| 210 | + if price_range == '0-50': | |
| 211 | + price_ranges.append({"lt": 50}) | |
| 212 | + elif price_range == '50-100': | |
| 213 | + price_ranges.append({"gte": 50, "lt": 100}) | |
| 214 | + elif price_range == '100-200': | |
| 215 | + price_ranges.append({"gte": 100, "lt": 200}) | |
| 216 | + elif price_range == '200+': | |
| 217 | + price_ranges.append({"gte": 200}) | |
| 218 | + | |
| 219 | + if price_ranges: | |
| 220 | + if len(price_ranges) == 1: | |
| 221 | + filter_clauses.append({ | |
| 222 | + "range": { | |
| 223 | + "price": price_ranges[0] | |
| 224 | + } | |
| 225 | + }) | |
| 226 | + else: | |
| 227 | + # Multiple price ranges - use bool should clause | |
| 228 | + range_clauses = [{"range": {"price": pr}} for pr in price_ranges] | |
| 229 | + filter_clauses.append({ | |
| 230 | + "bool": { | |
| 231 | + "should": range_clauses | |
| 232 | + } | |
| 233 | + }) | |
| 234 | + elif isinstance(value, dict): | |
| 206 | 235 | # Range query |
| 207 | 236 | if "gte" in value or "lte" in value or "gt" in value or "lt" in value: |
| 208 | 237 | filter_clauses.append({ |
| ... | ... | @@ -266,6 +295,65 @@ class ESQueryBuilder: |
| 266 | 295 | |
| 267 | 296 | return es_query |
| 268 | 297 | |
| 298 | + def add_dynamic_aggregations( | |
| 299 | + self, | |
| 300 | + es_query: Dict[str, Any], | |
| 301 | + aggregations: Dict[str, Any] | |
| 302 | + ) -> Dict[str, Any]: | |
| 303 | + """ | |
| 304 | + Add dynamic aggregations based on request parameters. | |
| 305 | + | |
| 306 | + Args: | |
| 307 | + es_query: Existing ES query | |
| 308 | + aggregations: Aggregation specifications | |
| 309 | + | |
| 310 | + Returns: | |
| 311 | + Modified ES query | |
| 312 | + """ | |
| 313 | + if "aggs" not in es_query: | |
| 314 | + es_query["aggs"] = {} | |
| 315 | + | |
| 316 | + for agg_name, agg_spec in aggregations.items(): | |
| 317 | + es_query["aggs"][agg_name] = agg_spec | |
| 318 | + | |
| 319 | + return es_query | |
| 320 | + | |
| 321 | + def add_sorting( | |
| 322 | + self, | |
| 323 | + es_query: Dict[str, Any], | |
| 324 | + sort_by: str, | |
| 325 | + sort_order: str = "desc" | |
| 326 | + ) -> Dict[str, Any]: | |
| 327 | + """ | |
| 328 | + Add sorting to ES query. | |
| 329 | + | |
| 330 | + Args: | |
| 331 | + es_query: Existing ES query | |
| 332 | + sort_by: Field name for sorting | |
| 333 | + sort_order: Sort order: 'asc' or 'desc' | |
| 334 | + | |
| 335 | + Returns: | |
| 336 | + Modified ES query | |
| 337 | + """ | |
| 338 | + if not sort_by: | |
| 339 | + return es_query | |
| 340 | + | |
| 341 | + if not sort_order: | |
| 342 | + sort_order = "desc" | |
| 343 | + | |
| 344 | + if "sort" not in es_query: | |
| 345 | + es_query["sort"] = [] | |
| 346 | + | |
| 347 | + # Add the specified sort | |
| 348 | + sort_field = { | |
| 349 | + sort_by: { | |
| 350 | + "order": sort_order.lower() | |
| 351 | + } | |
| 352 | + } | |
| 353 | + es_query["sort"].append(sort_field) | |
| 354 | + | |
| 355 | + return es_query | |
| 356 | + | |
| 269 | 357 | def add_aggregations( |
| 270 | 358 | self, |
| 271 | 359 | es_query: Dict[str, Any], | ... | ... |
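To make the `price_ranges` branch above concrete: selecting two price buckets should expand into a `bool`/`should` of range clauses on `price`. A sketch of the expected clause, following the mapping in this hunk, is shown below.

```python
# Sketch of the filter clause the price_ranges branch should produce for
# filters = {"price_ranges": ["0-50", "200+"]}; the bounds mirror the mapping above.
expected_price_filter = {
    "bool": {
        "should": [
            {"range": {"price": {"lt": 50}}},
            {"range": {"price": {"gte": 200}}},
        ]
    }
}
```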
search/multilang_query_builder.py
| ... | ... | @@ -319,7 +319,11 @@ class MultiLanguageQueryBuilder(ESQueryBuilder): |
| 319 | 319 | if hasattr(node, 'operator'): |
| 320 | 320 | # QueryNode object |
| 321 | 321 | operator = node.operator |
| 322 | - terms = node.terms | |
| 322 | + terms = node.terms if hasattr(node, 'terms') else None | |
| 323 | + | |
| 324 | + # For TERM nodes, check if there's a value | |
| 325 | + if operator == 'TERM' and hasattr(node, 'value') and node.value: | |
| 326 | + terms = node.value | |
| 323 | 327 | elif isinstance(node, tuple) and len(node) > 0: |
| 324 | 328 | # Tuple format from boolean parser |
| 325 | 329 | if hasattr(node[0], 'operator'): |
| ... | ... | @@ -353,9 +357,7 @@ class MultiLanguageQueryBuilder(ESQueryBuilder): |
| 353 | 357 | else: |
| 354 | 358 | return {"match_all": {}} |
| 355 | 359 | |
| 356 | - print(f"[MultiLangQueryBuilder] Building boolean query for operator: {operator}") | |
| 357 | - print(f"[MultiLangQueryBuilder] Terms: {terms}") | |
| 358 | - | |
| 360 | + | |
| 359 | 361 | if operator == 'TERM': |
| 360 | 362 | # Leaf node - handle field:query format |
| 361 | 363 | if isinstance(terms, str) and ':' in terms: |
| ... | ... | @@ -365,31 +367,58 @@ class MultiLanguageQueryBuilder(ESQueryBuilder): |
| 365 | 367 | field: value |
| 366 | 368 | } |
| 367 | 369 | } |
| 370 | + elif isinstance(terms, str): | |
| 371 | + # Simple text term - create match query | |
| 372 | + return { | |
| 373 | + "multi_match": { | |
| 374 | + "query": terms, | |
| 375 | + "fields": self.match_fields, | |
| 376 | + "type": "best_fields", | |
| 377 | + "operator": "AND" | |
| 378 | + } | |
| 379 | + } | |
| 368 | 380 | else: |
| 369 | - return {"match_all": {}} | |
| 381 | + # Invalid TERM node - return empty match | |
| 382 | + return { | |
| 383 | + "match_none": {} | |
| 384 | + } | |
| 370 | 385 | |
| 371 | 386 | elif operator == 'OR': |
| 372 | 387 | # Any term must match |
| 373 | 388 | should_clauses = [] |
| 374 | - for term in terms: | |
| 375 | - should_clauses.append(self._build_boolean_query_from_tuple(term)) | |
| 376 | - return { | |
| 377 | - "bool": { | |
| 378 | - "should": should_clauses, | |
| 379 | - "minimum_should_match": 1 | |
| 389 | + if terms: | |
| 390 | + for term in terms: | |
| 391 | + clause = self._build_boolean_query_from_tuple(term) | |
| 392 | + if clause and clause.get("match_none") is None: | |
| 393 | + should_clauses.append(clause) | |
| 394 | + | |
| 395 | + if should_clauses: | |
| 396 | + return { | |
| 397 | + "bool": { | |
| 398 | + "should": should_clauses, | |
| 399 | + "minimum_should_match": 1 | |
| 400 | + } | |
| 380 | 401 | } |
| 381 | - } | |
| 402 | + else: | |
| 403 | + return {"match_none": {}} | |
| 382 | 404 | |
| 383 | 405 | elif operator == 'AND': |
| 384 | 406 | # All terms must match |
| 385 | 407 | must_clauses = [] |
| 386 | - for term in terms: | |
| 387 | - must_clauses.append(self._build_boolean_query_from_tuple(term)) | |
| 388 | - return { | |
| 389 | - "bool": { | |
| 390 | - "must": must_clauses | |
| 408 | + if terms: | |
| 409 | + for term in terms: | |
| 410 | + clause = self._build_boolean_query_from_tuple(term) | |
| 411 | + if clause and clause.get("match_none") is None: | |
| 412 | + must_clauses.append(clause) | |
| 413 | + | |
| 414 | + if must_clauses: | |
| 415 | + return { | |
| 416 | + "bool": { | |
| 417 | + "must": must_clauses | |
| 418 | + } | |
| 391 | 419 | } |
| 392 | - } | |
| 420 | + else: | |
| 421 | + return {"match_none": {}} | |
| 393 | 422 | |
| 394 | 423 | elif operator == 'ANDNOT': |
| 395 | 424 | # First term must match, second must not | ... | ... |
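With the TERM fallback added above, a plain string term now yields a `multi_match` over the configured match fields instead of `match_all`. A sketch of the expected clause follows; the field list is an assumption for illustration (the real list comes from the builder's configuration).

```python
# Sketch: clause expected from the new string-TERM branch, assuming
# match_fields = ["name", "enSpuName"] (illustrative; actual fields come from config).
expected_term_clause = {
    "multi_match": {
        "query": "barbie",
        "fields": ["name", "enSpuName"],
        "type": "best_fields",
        "operator": "AND",
    }
}
```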
search/searcher.py
| ... | ... | @@ -103,7 +103,10 @@ class Searcher: |
| 103 | 103 | from_: int = 0, |
| 104 | 104 | filters: Optional[Dict[str, Any]] = None, |
| 105 | 105 | min_score: Optional[float] = None, |
| 106 | - context: Optional[RequestContext] = None | |
| 106 | + context: Optional[RequestContext] = None, | |
| 107 | + aggregations: Optional[Dict[str, Any]] = None, | |
| 108 | + sort_by: Optional[str] = None, | |
| 109 | + sort_order: Optional[str] = "desc" | |
| 107 | 110 | ) -> SearchResult: |
| 108 | 111 | """ |
| 109 | 112 | Execute search query. |
| ... | ... | @@ -115,6 +118,9 @@ class Searcher: |
| 115 | 118 | filters: Additional filters (field: value pairs) |
| 116 | 119 | min_score: Minimum score threshold |
| 117 | 120 | context: Request context for tracking (created if not provided) |
| 121 | + aggregations: Aggregation specifications for faceted search | |
| 122 | + sort_by: Field name for sorting | |
| 123 | + sort_order: Sort order: 'asc' or 'desc' | |
| 118 | 124 | |
| 119 | 125 | Returns: |
| 120 | 126 | SearchResult object |
| ... | ... | @@ -146,7 +152,10 @@ class Searcher: |
| 146 | 152 | 'enable_translation': enable_translation, |
| 147 | 153 | 'enable_embedding': enable_embedding, |
| 148 | 154 | 'enable_rerank': enable_rerank, |
| 149 | - 'min_score': min_score | |
| 155 | + 'min_score': min_score, | |
| 156 | + 'aggregations': aggregations, | |
| 157 | + 'sort_by': sort_by, | |
| 158 | + 'sort_order': sort_order | |
| 150 | 159 | } |
| 151 | 160 | |
| 152 | 161 | context.metadata['feature_flags'] = { |
| ... | ... | @@ -247,11 +256,19 @@ class Searcher: |
| 247 | 256 | ) |
| 248 | 257 | |
| 249 | 258 | # Add aggregations for faceted search |
| 250 | - if filters: | |
| 259 | + if aggregations: | |
| 260 | + # Use dynamic aggregations from request | |
| 261 | + es_query = self.query_builder.add_dynamic_aggregations(es_query, aggregations) | |
| 262 | + elif filters: | |
| 263 | + # Fallback to filter-based aggregations | |
| 251 | 264 | agg_fields = [f"{k}_keyword" for k in filters.keys() if f"{k}_keyword" in [f.name for f in self.config.fields]] |
| 252 | 265 | if agg_fields: |
| 253 | 266 | es_query = self.query_builder.add_aggregations(es_query, agg_fields) |
| 254 | 267 | |
| 268 | + # Add sorting if specified | |
| 269 | + if sort_by: | |
| 270 | + es_query = self.query_builder.add_sorting(es_query, sort_by, sort_order) | |
| 271 | + | |
| 255 | 272 | # Extract size and from from body for ES client parameters |
| 256 | 273 | body_for_es = {k: v for k, v in es_query.items() if k not in ['size', 'from']} |
| 257 | 274 | ... | ... |
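After `add_dynamic_aggregations` and `add_sorting` run, the body handed to Elasticsearch should carry top-level `aggs` and `sort` keys alongside the query. The sketch below shows only the expected shape; the query and aggregation contents are illustrative.

```python
# Shape sketch of the ES request body after the searcher applies aggregations and sorting.
# Contents are illustrative; only the top-level structure is the point here.
es_body = {
    "query": {"multi_match": {"query": "芭比", "fields": ["name", "enSpuName"]}},
    "aggs": {
        "category_stats": {"terms": {"field": "categoryName_keyword", "size": 10}},
    },
    "sort": [{"price": {"order": "asc"}}],
}
```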
| ... | ... | @@ -0,0 +1,340 @@ |
| 1 | +#!/usr/bin/env python3 | |
| 2 | +""" | |
| 3 | +Simple API server for testing aggregation functionality without external dependencies. | |
| 4 | +""" | |
| 5 | + | |
| 6 | +import json | |
| 7 | +import time | |
| 8 | +import random | |
| 9 | +from http.server import HTTPServer, BaseHTTPRequestHandler | |
| 10 | +from urllib.parse import urlparse, parse_qs | |
| 11 | +import threading | |
| 12 | + | |
| 13 | +class SearchAPIHandler(BaseHTTPRequestHandler): | |
| 14 | + """Simple API handler for search requests.""" | |
| 15 | + | |
| 16 | + def do_OPTIONS(self): | |
| 17 | + """Handle CORS preflight requests.""" | |
| 18 | + self.send_response(200) | |
| 19 | + self.send_header('Access-Control-Allow-Origin', '*') | |
| 20 | + self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS') | |
| 21 | + self.send_header('Access-Control-Allow-Headers', 'Content-Type') | |
| 22 | + self.end_headers() | |
| 23 | + | |
| 24 | + def do_POST(self): | |
| 25 | + """Handle POST requests.""" | |
| 26 | + if self.path == '/': | |
| 27 | + self.handle_search() | |
| 28 | + elif self.path == '/search/': | |
| 29 | + self.handle_search() | |
| 30 | + else: | |
| 31 | + self.send_response(404) | |
| 32 | + self.end_headers() | |
| 33 | + | |
| 34 | + def handle_search(self): | |
| 35 | + """Handle search requests with aggregations.""" | |
| 36 | + try: | |
| 37 | + # Read request body | |
| 38 | + content_length = int(self.headers['Content-Length']) | |
| 39 | + post_data = self.rfile.read(content_length) | |
| 40 | + request_data = json.loads(post_data.decode('utf-8')) | |
| 41 | + | |
| 42 | + query = request_data.get('query', '') | |
| 43 | + size = request_data.get('size', 10) | |
| 44 | + sort_by = request_data.get('sort_by', 'relevance') | |
| 45 | + aggregations = request_data.get('aggregations', {}) | |
| 46 | + filters = request_data.get('filters', {}) | |
| 47 | + | |
| 48 | + print(f"Search request: query='{query}', size={size}, sort_by={sort_by}") | |
| 49 | + print(f"Aggregations: {list(aggregations.keys()) if aggregations else 'None'}") | |
| 50 | + print(f"Filters: {filters if filters else 'None'}") | |
| 51 | + | |
| 52 | + # Simulate processing time | |
| 53 | + time.sleep(0.1) | |
| 54 | + | |
| 55 | + # Generate mock search results | |
| 56 | + results = self.generate_mock_results(query, size, sort_by, filters) | |
| 57 | + | |
| 58 | + # Generate mock aggregations | |
| 59 | + aggregation_results = self.generate_mock_aggregations(aggregations, filters) | |
| 60 | + | |
| 61 | + # Build response | |
| 62 | + response = { | |
| 63 | + "hits": results, | |
| 64 | + "total": len(results) + random.randint(10, 100), | |
| 65 | + "max_score": round(random.uniform(1.5, 3.5), 3), | |
| 66 | + "took_ms": random.randint(15, 45), | |
| 67 | + "aggregations": aggregation_results, | |
| 68 | + "query_info": { | |
| 69 | + "original_query": query, | |
| 70 | + "rewritten_query": query, | |
| 71 | + "detected_language": "zh" if any('\u4e00' <= char <= '\u9fff' for char in query) else "en", | |
| 72 | + "domain": "default", | |
| 73 | + "translations": {}, | |
| 74 | + "has_vector": False | |
| 75 | + } | |
| 76 | + } | |
| 77 | + | |
| 78 | + # Send response | |
| 79 | + self.send_response(200) | |
| 80 | + self.send_header('Content-Type', 'application/json') | |
| 81 | + self.send_header('Access-Control-Allow-Origin', '*') | |
| 82 | + self.end_headers() | |
| 83 | + | |
| 84 | + response_json = json.dumps(response, ensure_ascii=False, indent=2) | |
| 85 | + self.wfile.write(response_json.encode('utf-8')) | |
| 86 | + | |
| 87 | + print(f"Response sent with {len(results)} results and {len(aggregation_results)} aggregations") | |
| 88 | + | |
| 89 | + except Exception as e: | |
| 90 | + print(f"Error handling request: {e}") | |
| 91 | + self.send_response(500) | |
| 92 | + self.send_header('Content-Type', 'application/json') | |
| 93 | + self.send_header('Access-Control-Allow-Origin', '*') | |
| 94 | + self.end_headers() | |
| 95 | + | |
| 96 | + error_response = { | |
| 97 | + "error": str(e), | |
| 98 | + "detail": "Internal server error" | |
| 99 | + } | |
| 100 | + | |
| 101 | + self.wfile.write(json.dumps(error_response).encode('utf-8')) | |
| 102 | + | |
| 103 | + def generate_mock_results(self, query, size, sort_by, filters): | |
| 104 | + """Generate mock search results.""" | |
| 105 | + | |
| 106 | + # Sample product data | |
| 107 | + sample_products = [ | |
| 108 | + { | |
| 109 | + "skuId": 1001, | |
| 110 | + "name": "芭比娃娃梦幻套装", | |
| 111 | + "enSpuName": "Barbie Dream House Playset", | |
| 112 | + "ruSkuName": "Кукла Барби Мечтательный домик", | |
| 113 | + "categoryName": "芭比", | |
| 114 | + "brandName": "美泰", | |
| 115 | + "supplierName": "义乌玩具厂", | |
| 116 | + "price": 89.99, | |
| 117 | + "imageUrl": "https://picsum.photos/seed/barbie1/200/200.jpg", | |
| 118 | + "create_time": "2024-01-15T10:30:00Z", | |
| 119 | + "days_since_last_update": 45 | |
| 120 | + }, | |
| 121 | + { | |
| 122 | + "skuId": 1002, | |
| 123 | + "name": "芭比娃娃时尚系列", | |
| 124 | + "enSpuName": "Barbie Fashion Doll Collection", | |
| 125 | + "ruSkuName": "Кукла Барби Модная коллекция", | |
| 126 | + "categoryName": "芭比", | |
| 127 | + "brandName": "美泰", | |
| 128 | + "supplierName": "汕头玩具公司", | |
| 129 | + "price": 45.50, | |
| 130 | + "imageUrl": "https://picsum.photos/seed/barbie2/200/200.jpg", | |
| 131 | + "create_time": "2024-02-20T14:15:00Z", | |
| 132 | + "days_since_last_update": 30 | |
| 133 | + }, | |
| 134 | + { | |
| 135 | + "skuId": 1003, | |
| 136 | + "name": "儿童积木套装", | |
| 137 | + "enSpuName": "Kids Building Blocks Set", | |
| 138 | + "ruSkuName": "Детский строительный набор", | |
| 139 | + "categoryName": "积木", | |
| 140 | + "brandName": "乐高", | |
| 141 | + "supplierName": "深圳塑胶制品厂", | |
| 142 | + "price": 158.00, | |
| 143 | + "imageUrl": "https://picsum.photos/seed/blocks1/200/200.jpg", | |
| 144 | + "create_time": "2024-01-10T09:20:00Z", | |
| 145 | + "days_since_last_update": 60 | |
| 146 | + }, | |
| 147 | + { | |
| 148 | + "skuId": 1004, | |
| 149 | + "name": "消防车玩具模型", | |
| 150 | + "enSpuName": "Fire Truck Toy Model", | |
| 151 | + "ruSkuName": "Модель пожарной машины", | |
| 152 | + "categoryName": "小汽车", | |
| 153 | + "brandName": "多美卡", | |
| 154 | + "supplierName": "东莞玩具制造厂", | |
| 155 | + "price": 78.50, | |
| 156 | + "imageUrl": "https://picsum.photos/seed/firetruck1/200/200.jpg", | |
| 157 | + "create_time": "2024-03-05T16:45:00Z", | |
| 158 | + "days_since_last_update": 15 | |
| 159 | + }, | |
| 160 | + { | |
| 161 | + "skuId": 1005, | |
| 162 | + "name": "婴儿毛绒玩具", | |
| 163 | + "enSpuName": "Baby Plush Toy", | |
| 164 | + "ruSkuName": "Детская плюшевая игрушка", | |
| 165 | + "categoryName": "婴儿娃娃", | |
| 166 | + "brandName": "迪士尼", | |
| 167 | + "supplierName": "上海礼品公司", | |
| 168 | + "price": 32.00, | |
| 169 | + "imageUrl": "https://picsum.photos/seed/plush1/200/200.jpg", | |
| 170 | + "create_time": "2024-02-14T11:30:00Z", | |
| 171 | + "days_since_last_update": 25 | |
| 172 | + } | |
| 173 | + ] | |
| 174 | + | |
| 175 | + # Apply filters if any | |
| 176 | + if filters: | |
| 177 | + filtered_products = [] | |
| 178 | + for product in sample_products: | |
| 179 | + include = True | |
| 180 | + | |
| 181 | + # Check category filter | |
| 182 | + if 'category_name' in filters: | |
| 183 | + if product['categoryName'] not in filters['category_name']: | |
| 184 | + include = False | |
| 185 | + | |
| 186 | + # Check brand filter | |
| 187 | + if 'brand_name' in filters: | |
| 188 | + if product['brandName'] not in filters['brand_name']: | |
| 189 | + include = False | |
| 190 | + | |
| 191 | + # Check price range filter | |
| 192 | + if 'price_ranges' in filters: | |
| 193 | + price = product['price'] | |
| 194 | + in_range = False | |
| 195 | + for price_range in filters['price_ranges']: | |
| 196 | + if price_range == '0-50' and price <= 50: | |
| 197 | + in_range = True | |
| 198 | + elif price_range == '50-100' and 50 < price <= 100: | |
| 199 | + in_range = True | |
| 200 | + elif price_range == '100-200' and 100 < price <= 200: | |
| 201 | + in_range = True | |
| 202 | + elif price_range == '200+' and price > 200: | |
| 203 | + in_range = True | |
| 204 | + if not in_range: | |
| 205 | + include = False | |
| 206 | + | |
| 207 | + if include: | |
| 208 | + filtered_products.append(product) | |
| 209 | + sample_products = filtered_products | |
| 210 | + | |
| 211 | + # Apply sorting | |
| 212 | + if sort_by == 'price_asc': | |
| 213 | + sample_products.sort(key=lambda x: x.get('price', 0)) | |
| 214 | + elif sort_by == 'price_desc': | |
| 215 | + sample_products.sort(key=lambda x: x.get('price', 0), reverse=True) | |
| 216 | + elif sort_by == 'time_desc': | |
| 217 | + sample_products.sort(key=lambda x: x.get('create_time', ''), reverse=True) | |
| 218 | + | |
| 219 | + # Convert to API response format | |
| 220 | + results = [] | |
| 221 | + for i, product in enumerate(sample_products[:size]): | |
| 222 | + hit = { | |
| 223 | + "_id": str(product['skuId']), | |
| 224 | + "_score": round(random.uniform(1.5, 3.5), 3), | |
| 225 | + "_source": product | |
| 226 | + } | |
| 227 | + results.append(hit) | |
| 228 | + | |
| 229 | + return results | |
| 230 | + | |
| 231 | + def generate_mock_aggregations(self, aggregations, filters): | |
| 232 | + """Generate mock aggregation results.""" | |
| 233 | + if not aggregations: | |
| 234 | + return {} | |
| 235 | + | |
| 236 | + result = {} | |
| 237 | + | |
| 238 | + for agg_name, agg_spec in aggregations.items(): | |
| 239 | + agg_type = agg_spec.get('type', 'terms') | |
| 240 | + | |
| 241 | + if agg_type == 'terms': | |
| 242 | + # Generate mock terms aggregation | |
| 243 | + if agg_name == 'category_name': | |
| 244 | + buckets = [ | |
| 245 | + {"key": "芭比", "doc_count": random.randint(15, 35)}, | |
| 246 | + {"key": "儿童娃娃", "doc_count": random.randint(8, 20)}, | |
| 247 | + {"key": "积木", "doc_count": random.randint(5, 15)}, | |
| 248 | + {"key": "小汽车", "doc_count": random.randint(3, 12)}, | |
| 249 | + {"key": "婴儿娃娃", "doc_count": random.randint(4, 10)}, | |
| 250 | + {"key": "人物", "doc_count": random.randint(6, 18)} | |
| 251 | + ] | |
| 252 | + elif agg_name == 'brand_name': | |
| 253 | + buckets = [ | |
| 254 | + {"key": "美泰", "doc_count": random.randint(20, 40)}, | |
| 255 | + {"key": "乐高", "doc_count": random.randint(10, 25)}, | |
| 256 | + {"key": "迪士尼", "doc_count": random.randint(8, 20)}, | |
| 257 | + {"key": "多美卡", "doc_count": random.randint(5, 15)}, | |
| 258 | + {"key": "孩之宝", "doc_count": random.randint(6, 18)}, | |
| 259 | + {"key": "万代", "doc_count": random.randint(3, 10)} | |
| 260 | + ] | |
| 261 | + elif agg_name == 'material_type': | |
| 262 | + buckets = [ | |
| 263 | + {"key": "塑料", "doc_count": random.randint(40, 80)}, | |
| 264 | + {"key": "布绒", "doc_count": random.randint(8, 20)}, | |
| 265 | + {"key": "金属", "doc_count": random.randint(5, 15)}, | |
| 266 | + {"key": "木质", "doc_count": random.randint(3, 12)} | |
| 267 | + ] | |
| 268 | + else: | |
| 269 | + # Generic terms aggregation | |
| 270 | + buckets = [ | |
| 271 | + {"key": f"选项{i+1}", "doc_count": random.randint(5, 25)} | |
| 272 | + for i in range(5) | |
| 273 | + ] | |
| 274 | + | |
| 275 | + result[agg_name] = { | |
| 276 | + "doc_count_error_upper_bound": 0, | |
| 277 | + "sum_other_doc_count": random.randint(10, 50), | |
| 278 | + "buckets": buckets | |
| 279 | + } | |
| 280 | + | |
| 281 | + elif agg_type == 'range': | |
| 282 | + # Generate mock range aggregation (usually for price) | |
| 283 | + if agg_name == 'price_ranges': | |
| 284 | + ranges = agg_spec.get('ranges', []) | |
| 285 | + buckets = [] | |
| 286 | + for range_spec in ranges: | |
| 287 | + key = range_spec.get('key', 'unknown') | |
| 288 | + count = random.randint(5, 30) | |
| 289 | + bucket_data = {"key": key, "doc_count": count} | |
| 290 | + | |
| 291 | + # Add range bounds | |
| 292 | + if 'to' in range_spec: | |
| 293 | + bucket_data['to'] = range_spec['to'] | |
| 294 | + if 'from' in range_spec: | |
| 295 | + bucket_data['from'] = range_spec['from'] | |
| 296 | + | |
| 297 | + buckets.append(bucket_data) | |
| 298 | + | |
| 299 | + result[agg_name] = {"buckets": buckets} | |
| 300 | + | |
| 301 | + return result | |
| 302 | + | |
| 303 | + def log_message(self, format, *args): | |
| 304 | + """Override to reduce log noise.""" | |
| 305 | + pass | |
| 306 | + | |
| 307 | +def run_server(): | |
| 308 | + """Run the API server.""" | |
| 309 | + server_address = ('', 6002) | |
| 310 | + httpd = HTTPServer(server_address, SearchAPIHandler) | |
| 311 | + print("🚀 Simple Search API Server started!") | |
| 312 | + print("📍 API: http://localhost:6002") | |
| 313 | + print("🔍 Search endpoint: http://localhost:6002/search/") | |
| 314 | + print("🌐 Frontend should connect to: http://localhost:6002") | |
| 315 | + print("⏹️ Press Ctrl+C to stop") | |
| 316 | + | |
| 317 | + try: | |
| 318 | + httpd.serve_forever() | |
| 319 | + except KeyboardInterrupt: | |
| 320 | + print("\n🛑 Server stopped") | |
| 321 | + httpd.server_close() | |
| 322 | + | |
| 323 | +def run_server(): | |
| 324 | + """Run the API server - main entry point.""" | |
| 325 | + server_address = ('', 6002) | |
| 326 | + httpd = HTTPServer(server_address, SearchAPIHandler) | |
| 327 | + print("🚀 Simple Search API Server started!") | |
| 328 | + print("📍 API: http://localhost:6002") | |
| 329 | + print("🔍 Search endpoint: http://localhost:6002/search/") | |
| 330 | + print("🌐 Frontend should connect to: http://localhost:6002") | |
| 331 | + print("⏹️ Press Ctrl+C to stop") | |
| 332 | + | |
| 333 | + try: | |
| 334 | + httpd.serve_forever() | |
| 335 | + except KeyboardInterrupt: | |
| 336 | + print("\n🛑 Server stopped") | |
| 337 | + httpd.server_close() | |
| 338 | + | |
| 339 | +if __name__ == '__main__': | |
| 340 | + run_server() | |
| 0 | 341 | \ No newline at end of file | ... | ... |
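Once the mock server above is running locally, it can be exercised directly; a minimal usage sketch is shown below, using the mock's own `type`-based aggregation spec and its `price_asc` sort key (port as printed at startup).

```python
# Minimal usage sketch for the mock server above; assumes it is running on localhost:6002.
import requests

payload = {
    "query": "玩具",
    "size": 3,
    "sort_by": "price_asc",
    "aggregations": {"category_name": {"type": "terms"}},
}
resp = requests.post("http://localhost:6002/search/", json=payload)
data = resp.json()
print(data["total"], "results; aggregations:", list(data.get("aggregations", {}).keys()))
```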
| ... | ... | @@ -0,0 +1,166 @@ |
| 1 | +#!/usr/bin/env python3 | |
| 2 | +""" | |
| 3 | +Test script for aggregation functionality | |
| 4 | +""" | |
| 5 | + | |
| 6 | +import requests | |
| 7 | +import json | |
| 8 | + | |
| 9 | +API_BASE_URL = 'http://120.76.41.98:6002' | |
| 10 | + | |
| 11 | +def test_search_with_aggregations(): | |
| 12 | + """Test search with aggregations""" | |
| 13 | + | |
| 14 | + # Test data | |
| 15 | + test_query = { | |
| 16 | + "query": "玩具", | |
| 17 | + "size": 5, | |
| 18 | + "aggregations": { | |
| 19 | + "category_stats": { | |
| 20 | + "terms": { | |
| 21 | + "field": "categoryName_keyword", | |
| 22 | + "size": 10 | |
| 23 | + } | |
| 24 | + }, | |
| 25 | + "brand_stats": { | |
| 26 | + "terms": { | |
| 27 | + "field": "brandName_keyword", | |
| 28 | + "size": 10 | |
| 29 | + } | |
| 30 | + }, | |
| 31 | + "price_ranges": { | |
| 32 | + "range": { | |
| 33 | + "field": "price", | |
| 34 | + "ranges": [ | |
| 35 | + {"key": "0-50", "to": 50}, | |
| 36 | + {"key": "50-100", "from": 50, "to": 100}, | |
| 37 | + {"key": "100-200", "from": 100, "to": 200}, | |
| 38 | + {"key": "200+", "from": 200} | |
| 39 | + ] | |
| 40 | + } | |
| 41 | + } | |
| 42 | + } | |
| 43 | + } | |
| 44 | + | |
| 45 | + print("Testing search with aggregations...") | |
| 46 | + print(f"Query: {json.dumps(test_query, indent=2, ensure_ascii=False)}") | |
| 47 | + | |
| 48 | + try: | |
| 49 | + response = requests.post(f"{API_BASE_URL}/search/", | |
| 50 | + json=test_query, | |
| 51 | + headers={'Content-Type': 'application/json'}) | |
| 52 | + | |
| 53 | + print(f"Status Code: {response.status_code}") | |
| 54 | + | |
| 55 | + if response.ok: | |
| 56 | + data = response.json() | |
| 57 | + print(f"Found {data['total']} results in {data['took_ms']}ms") | |
| 58 | + print(f"Max Score: {data['max_score']}") | |
| 59 | + | |
| 60 | + # Print aggregations | |
| 61 | + if data.get('aggregations'): | |
| 62 | + print("\nAggregations:") | |
| 63 | + for agg_name, agg_result in data['aggregations'].items(): | |
| 64 | + print(f"\n{agg_name}:") | |
| 65 | + if 'buckets' in agg_result: | |
| 66 | + for bucket in agg_result['buckets'][:5]: # Show first 5 buckets | |
| 67 | + print(f" - {bucket['key']}: {bucket['doc_count']}") | |
| 68 | + | |
| 69 | + # Print first few results | |
| 70 | + print(f"\nFirst 3 results:") | |
| 71 | + for i, hit in enumerate(data['hits'][:3]): | |
| 72 | + source = hit['_source'] | |
| 73 | + print(f"\n{i+1}. {source.get('name', 'N/A')}") | |
| 74 | + print(f" Category: {source.get('categoryName', 'N/A')}") | |
| 75 | + print(f" Brand: {source.get('brandName', 'N/A')}") | |
| 76 | + print(f" Price: {source.get('price', 'N/A')}") | |
| 77 | + print(f" Score: {hit['_score']:.4f}") | |
| 78 | + else: | |
| 79 | + print(f"Error: {response.status_code}") | |
| 80 | + print(f"Response: {response.text}") | |
| 81 | + | |
| 82 | + except Exception as e: | |
| 83 | + print(f"Request failed: {e}") | |
| 84 | + | |
| 85 | +def test_search_with_filters(): | |
| 86 | + """Test search with filters""" | |
| 87 | + | |
| 88 | + test_filters = { | |
| 89 | + "query": "玩具", | |
| 90 | + "size": 5, | |
| 91 | + "filters": { | |
| 92 | + "categoryName_keyword": ["玩具"], | |
| 93 | + "price_ranges": ["0-50", "50-100"] | |
| 94 | + } | |
| 95 | + } | |
| 96 | + | |
| 97 | + print("\n\nTesting search with filters...") | |
| 98 | + print(f"Query: {json.dumps(test_filters, indent=2, ensure_ascii=False)}") | |
| 99 | + | |
| 100 | + try: | |
| 101 | + response = requests.post(f"{API_BASE_URL}/search/", | |
| 102 | + json=test_filters, | |
| 103 | + headers={'Content-Type': 'application/json'}) | |
| 104 | + | |
| 105 | + print(f"Status Code: {response.status_code}") | |
| 106 | + | |
| 107 | + if response.ok: | |
| 108 | + data = response.json() | |
| 109 | + print(f"Found {data['total']} results in {data['took_ms']}ms") | |
| 110 | + | |
| 111 | + print(f"\nFirst 3 results:") | |
| 112 | + for i, hit in enumerate(data['hits'][:3]): | |
| 113 | + source = hit['_source'] | |
| 114 | + print(f"\n{i+1}. {source.get('name', 'N/A')}") | |
| 115 | + print(f" Category: {source.get('categoryName', 'N/A')}") | |
| 116 | + print(f" Brand: {source.get('brandName', 'N/A')}") | |
| 117 | + print(f" Price: {source.get('price', 'N/A')}") | |
| 118 | + print(f" Score: {hit['_score']:.4f}") | |
| 119 | + else: | |
| 120 | + print(f"Error: {response.status_code}") | |
| 121 | + print(f"Response: {response.text}") | |
| 122 | + | |
| 123 | + except Exception as e: | |
| 124 | + print(f"Request failed: {e}") | |
| 125 | + | |
| 126 | +def test_search_with_sorting(): | |
| 127 | + """Test search with sorting""" | |
| 128 | + | |
| 129 | + test_sort = { | |
| 130 | + "query": "玩具", | |
| 131 | + "size": 5, | |
| 132 | + "sort_by": "price", | |
| 133 | + "sort_order": "asc" | |
| 134 | + } | |
| 135 | + | |
| 136 | + print("\n\nTesting search with sorting (price ascending)...") | |
| 137 | + print(f"Query: {json.dumps(test_sort, indent=2, ensure_ascii=False)}") | |
| 138 | + | |
| 139 | + try: | |
| 140 | + response = requests.post(f"{API_BASE_URL}/search/", | |
| 141 | + json=test_sort, | |
| 142 | + headers={'Content-Type': 'application/json'}) | |
| 143 | + | |
| 144 | + print(f"Status Code: {response.status_code}") | |
| 145 | + | |
| 146 | + if response.ok: | |
| 147 | + data = response.json() | |
| 148 | + print(f"Found {data['total']} results in {data['took_ms']}ms") | |
| 149 | + | |
| 150 | + print(f"\nFirst 3 results (sorted by price):") | |
| 151 | + for i, hit in enumerate(data['hits'][:3]): | |
| 152 | + source = hit['_source'] | |
| 153 | + print(f"\n{i+1}. {source.get('name', 'N/A')}") | |
| 154 | + print(f" Price: {source.get('price', 'N/A')}") | |
| 155 | + print(f" Score: {hit['_score']:.4f}") | |
| 156 | + else: | |
| 157 | + print(f"Error: {response.status_code}") | |
| 158 | + print(f"Response: {response.text}") | |
| 159 | + | |
| 160 | + except Exception as e: | |
| 161 | + print(f"Request failed: {e}") | |
| 162 | + | |
| 163 | +if __name__ == "__main__": | |
| 164 | + test_search_with_aggregations() | |
| 165 | + test_search_with_filters() | |
| 166 | + test_search_with_sorting() | |
| 0 | 167 | \ No newline at end of file | ... | ... |
| ... | ... | @@ -0,0 +1,236 @@ |
| 1 | +#!/usr/bin/env python3 | |
| 2 | +""" | |
| 3 | +Simple test script to verify aggregation functionality without external dependencies. | |
| 4 | +""" | |
| 5 | + | |
| 6 | +import sys | |
| 7 | +import os | |
| 8 | + | |
| 9 | +# Add the project root to the Python path | |
| 10 | +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) | |
| 11 | + | |
| 12 | +def test_es_query_builder_aggregations(): | |
| 13 | + """Test the ES query builder aggregation methods.""" | |
| 14 | + print("Testing ES Query Builder Aggregation Methods...") | |
| 15 | + | |
| 16 | + # Import the query builder | |
| 17 | + try: | |
| 18 | + from search.es_query_builder import ESQueryBuilder | |
| 19 | + print("✓ ESQueryBuilder imported successfully") | |
| 20 | + except ImportError as e: | |
| 21 | + print(f"✗ Failed to import ESQueryBuilder: {e}") | |
| 22 | + return False | |
| 23 | + | |
| 24 | + # Create a query builder instance | |
| 25 | + builder = ESQueryBuilder( | |
| 26 | + index_name="test_index", | |
| 27 | + match_fields=["name", "description"] | |
| 28 | + ) | |
| 29 | + | |
| 30 | + # Test basic aggregation | |
| 31 | + es_query = {"query": {"match_all": {}}} | |
| 32 | + | |
| 33 | + # Test add_dynamic_aggregations | |
| 34 | + aggregations = { | |
| 35 | + "category_name": { | |
| 36 | + "type": "terms", | |
| 37 | + "field": "categoryName_keyword", | |
| 38 | + "size": 10 | |
| 39 | + }, | |
| 40 | + "price_ranges": { | |
| 41 | + "type": "range", | |
| 42 | + "field": "price", | |
| 43 | + "ranges": [ | |
| 44 | + {"key": "0-50", "to": 50}, | |
| 45 | + {"key": "50-100", "from": 50, "to": 100} | |
| 46 | + ] | |
| 47 | + } | |
| 48 | + } | |
| 49 | + | |
| 50 | + result_query = builder.add_dynamic_aggregations(es_query, aggregations) | |
| 51 | + | |
| 52 | + if "aggs" in result_query: | |
| 53 | + print("✓ Aggregations added to query") | |
| 54 | + | |
| 55 | + # Check category aggregation | |
| 56 | + if "category_name" in result_query["aggs"]: | |
| 57 | + category_agg = result_query["aggs"]["category_name"] | |
| 58 | + if "terms" in category_agg and category_agg["terms"]["field"] == "categoryName_keyword": | |
| 59 | + print("✓ Category aggregation correctly configured") | |
| 60 | + else: | |
| 61 | + print("✗ Category aggregation incorrectly configured") | |
| 62 | + return False | |
| 63 | + | |
| 64 | + # Check price range aggregation | |
| 65 | + if "price_ranges" in result_query["aggs"]: | |
| 66 | + price_agg = result_query["aggs"]["price_ranges"] | |
| 67 | + if "range" in price_agg and price_agg["range"]["field"] == "price": | |
| 68 | + print("✓ Price range aggregation correctly configured") | |
| 69 | + else: | |
| 70 | + print("✗ Price range aggregation incorrectly configured") | |
| 71 | + return False | |
| 72 | + else: | |
| 73 | + print("✗ No aggregations added to query") | |
| 74 | + return False | |
| 75 | + | |
| 76 | + # Test sorting | |
| 77 | + result_query_asc = builder.add_sorting({}, "price_asc") | |
| 78 | + if "sort" in result_query_asc: | |
| 79 | + print("✓ Price ascending sort added") | |
| 80 | + else: | |
| 81 | + print("✗ Price ascending sort not added") | |
| 82 | + return False | |
| 83 | + | |
| 84 | + result_query_desc = builder.add_sorting({}, "price_desc") | |
| 85 | + if "sort" in result_query_desc: | |
| 86 | + print("✓ Price descending sort added") | |
| 87 | + else: | |
| 88 | + print("✗ Price descending sort not added") | |
| 89 | + return False | |
| 90 | + | |
| 91 | + result_query_time = builder.add_sorting({}, "time_desc") | |
| 92 | + if "sort" in result_query_time: | |
| 93 | + print("✓ Time descending sort added") | |
| 94 | + else: | |
| 95 | + print("✗ Time descending sort not added") | |
| 96 | + return False | |
| 97 | + | |
| 98 | + return True | |
| 99 | + | |
| 100 | + | |
| 101 | +def test_searcher_integration(): | |
| 102 | + """Test searcher integration with new parameters.""" | |
| 103 | + print("\nTesting Searcher Integration...") | |
| 104 | + | |
| 105 | + try: | |
| 106 | + from search.searcher import Searcher | |
| 107 | + print("✓ Searcher imported successfully") | |
| 108 | + except ImportError as e: | |
| 109 | + print(f"✗ Failed to import Searcher: {e}") | |
| 110 | + return False | |
| 111 | + | |
| 112 | + # We can't easily test the full searcher without ES, but we can check the method signature | |
| 113 | + import inspect | |
| 114 | + search_method = getattr(Searcher, 'search', None) | |
| 115 | + | |
| 116 | + if search_method: | |
| 117 | + sig = inspect.signature(search_method) | |
| 118 | + params = list(sig.parameters.keys()) | |
| 119 | + | |
| 120 | + expected_params = ['query', 'size', 'from_', 'filters', 'min_score', 'aggregations', 'sort_by', 'sort_order', 'context'] | |
| 121 | + for param in expected_params: | |
| 122 | + if param in params: | |
| 123 | + print(f"✓ Parameter '{param}' found in search method") | |
| 124 | + else: | |
| 125 | + print(f"✗ Parameter '{param}' missing from search method") | |
| 126 | + return False | |
| 127 | + else: | |
| 128 | + print("✗ Search method not found in Searcher class") | |
| 129 | + return False | |
| 130 | + | |
| 131 | + return True | |
| 132 | + | |
| 133 | + | |
| 134 | +def test_api_route_integration(): | |
| 135 | + """Test API route integration.""" | |
| 136 | + print("\nTesting API Route Integration...") | |
| 137 | + | |
| 138 | + try: | |
| 139 | + from api.routes.search import router | |
| 140 | + print("✓ Search router imported successfully") | |
| 141 | + except ImportError as e: | |
| 142 | + print(f"✗ Failed to import search router: {e}") | |
| 143 | + return False | |
| 144 | + | |
| 145 | + # Check if the route exists | |
| 146 | + routes = [route.path for route in router.routes] | |
| 147 | + if "/" in routes: | |
| 148 | + print("✓ Main search route found") | |
| 149 | + else: | |
| 150 | + print("✗ Main search route not found") | |
| 151 | + return False | |
| 152 | + | |
| 153 | + return True | |
| 154 | + | |
| 155 | + | |
| 156 | +def test_configuration(): | |
| 157 | + """Test configuration parsing.""" | |
| 158 | + print("\nTesting Configuration...") | |
| 159 | + | |
| 160 | + try: | |
| 161 | + from config import CustomerConfig | |
| 162 | + print("✓ CustomerConfig imported successfully") | |
| 163 | + except ImportError as e: | |
| 164 | + print(f"✗ Failed to import CustomerConfig: {e}") | |
| 165 | + return False | |
| 166 | + | |
| 167 | + # Try to load the customer1 config | |
| 168 | + try: | |
| 169 | + config = CustomerConfig.load_from_file("config/schema/customer1_config.yaml") | |
| 170 | + print("✓ Customer1 configuration loaded successfully") | |
| 171 | + | |
| 172 | + # Check if price field is in the configuration | |
| 173 | + field_names = [field.name for field in config.fields] | |
| 174 | + if "price" in field_names: | |
| 175 | + print("✓ Price field found in configuration") | |
| 176 | + else: | |
| 177 | + print("✗ Price field not found in configuration") | |
| 178 | + return False | |
| 179 | + | |
| 180 | + # Check keyword fields for aggregations | |
| 181 | + if "categoryName_keyword" in field_names: | |
| 182 | + print("✓ Category keyword field found") | |
| 183 | + else: | |
| 184 | + print("✗ Category keyword field not found") | |
| 185 | + return False | |
| 186 | + | |
| 187 | + if "brandName_keyword" in field_names: | |
| 188 | + print("✓ Brand keyword field found") | |
| 189 | + else: | |
| 190 | + print("✗ Brand keyword field not found") | |
| 191 | + return False | |
| 192 | + | |
| 193 | + except Exception as e: | |
| 194 | + print(f"✗ Failed to load configuration: {e}") | |
| 195 | + return False | |
| 196 | + | |
| 197 | + return True | |
| 198 | + | |
| 199 | + | |
| 200 | +def main(): | |
| 201 | + """Run all tests.""" | |
| 202 | + print("=== Search Engine Aggregation Functionality Tests ===\n") | |
| 203 | + | |
| 204 | + tests = [ | |
| 205 | + test_es_query_builder_aggregations, | |
| 206 | + test_searcher_integration, | |
| 207 | + test_api_route_integration, | |
| 208 | + test_configuration | |
| 209 | + ] | |
| 210 | + | |
| 211 | + passed = 0 | |
| 212 | + total = len(tests) | |
| 213 | + | |
| 214 | + for test in tests: | |
| 215 | + try: | |
| 216 | + if test(): | |
| 217 | + passed += 1 | |
| 218 | + print(f"✓ {test.__name__} PASSED") | |
| 219 | + else: | |
| 220 | + print(f"✗ {test.__name__} FAILED") | |
| 221 | + except Exception as e: | |
| 222 | + print(f"✗ {test.__name__} ERROR: {e}") | |
| 223 | + | |
| 224 | + print(f"\n=== Test Results: {passed}/{total} tests passed ===") | |
| 225 | + | |
| 226 | + if passed == total: | |
| 227 | + print("🎉 All tests passed! Aggregation functionality is ready.") | |
| 228 | + return True | |
| 229 | + else: | |
| 230 | + print("❌ Some tests failed. Please check the implementation.") | |
| 231 | + return False | |
| 232 | + | |
| 233 | + | |
| 234 | +if __name__ == "__main__": | |
| 235 | + success = main() | |
| 236 | + sys.exit(0 if success else 1) | |
| 0 | 237 | \ No newline at end of file | ... | ... |
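To make the assertions in test_es_query_builder_aggregations easier to follow, this is the "aggs" block they expect add_dynamic_aggregations to attach to the query. It is only a sketch of the asserted shape (standard Elasticsearch terms and range aggregations), not the builder's actual output:

# Shape asserted by the test above; agg names and fields mirror its fixtures.
expected_aggs = {
    "category_name": {
        "terms": {"field": "categoryName_keyword", "size": 10}
    },
    "price_ranges": {
        "range": {
            "field": "price",
            "ranges": [
                {"key": "0-50", "to": 50},
                {"key": "50-100", "from": 50, "to": 100},
            ],
        }
    },
}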
| ... | ... | @@ -0,0 +1,211 @@ |
| 1 | +#!/usr/bin/env python3 | |
| 2 | +""" | |
| 3 | +Complete test script simulating frontend search interaction | |
| 4 | +""" | |
| 5 | + | |
| 6 | +import requests | |
| 7 | +import json | |
| 8 | + | |
| 9 | +API_BASE_URL = 'http://120.76.41.98:6002' | |
| 10 | + | |
| 11 | +def test_complete_search_workflow(): | |
| 12 | + """Test complete search workflow similar to frontend""" | |
| 13 | + | |
| 14 | + print("=" * 60) | |
| 15 | + print("完整搜索流程测试") | |
| 16 | + print("=" * 60) | |
| 17 | + | |
| 18 | + # Step 1: Initial search with aggregations | |
| 19 | + print("\n1️⃣ 初始搜索(带聚合功能)") | |
| 20 | + print("-" * 30) | |
| 21 | + | |
| 22 | + search_request = { | |
| 23 | + "query": "芭比娃娃", | |
| 24 | + "size": 10, | |
| 25 | + "aggregations": { | |
| 26 | + "category_stats": { | |
| 27 | + "terms": { | |
| 28 | + "field": "categoryName_keyword", | |
| 29 | + "size": 10 | |
| 30 | + } | |
| 31 | + }, | |
| 32 | + "brand_stats": { | |
| 33 | + "terms": { | |
| 34 | + "field": "brandName_keyword", | |
| 35 | + "size": 10 | |
| 36 | + } | |
| 37 | + }, | |
| 38 | + "price_ranges": { | |
| 39 | + "range": { | |
| 40 | + "field": "price", | |
| 41 | + "ranges": [ | |
| 42 | + {"key": "0-50", "to": 50}, | |
| 43 | + {"key": "50-100", "from": 50, "to": 100}, | |
| 44 | + {"key": "100-200", "from": 100, "to": 200}, | |
| 45 | + {"key": "200+", "from": 200} | |
| 46 | + ] | |
| 47 | + } | |
| 48 | + } | |
| 49 | + } | |
| 50 | + } | |
| 51 | + | |
| 52 | + try: | |
| 53 | + response = requests.post(f"{API_BASE_URL}/search/", json=search_request) | |
| 54 | + | |
| 55 | + if response.ok: | |
| 56 | + data = response.json() | |
| 57 | + print(f"✅ 找到 {data['total']} 个结果,耗时 {data['took_ms']}ms") | |
| 58 | + | |
| 59 | + # Show aggregations results | |
| 60 | + if data.get('aggregations'): | |
| 61 | + print("\n📊 聚合结果:") | |
| 62 | + | |
| 63 | + # Category aggregations | |
| 64 | + if 'category_stats' in data['aggregations']: | |
| 65 | + print(" 🏷️ 分类统计:") | |
| 66 | + for bucket in data['aggregations']['category_stats']['buckets'][:3]: | |
| 67 | + print(f" - {bucket['key']}: {bucket['doc_count']} 个商品") | |
| 68 | + | |
| 69 | + # Brand aggregations | |
| 70 | + if 'brand_stats' in data['aggregations']: | |
| 71 | + print(" 🏢 品牌统计:") | |
| 72 | + for bucket in data['aggregations']['brand_stats']['buckets'][:3]: | |
| 73 | + print(f" - {bucket['key']}: {bucket['doc_count']} 个商品") | |
| 74 | + | |
| 75 | + # Price ranges | |
| 76 | + if 'price_ranges' in data['aggregations']: | |
| 77 | + print(" 💰 价格分布:") | |
| 78 | + for bucket in data['aggregations']['price_ranges']['buckets']: | |
| 79 | + print(f" - {bucket['key']}: {bucket['doc_count']} 个商品") | |
| 80 | + | |
| 81 | + # Show sample results | |
| 82 | + print(f"\n🔍 前3个搜索结果:") | |
| 83 | + for i, hit in enumerate(data['hits'][:3]): | |
| 84 | + source = hit['_source'] | |
| 85 | + price = source.get('price', 'N/A') | |
| 86 | + category = source.get('categoryName', 'N/A') | |
| 87 | + brand = source.get('brandName', 'N/A') | |
| 88 | + print(f" {i+1}. {source.get('name', 'N/A')}") | |
| 89 | + print(f" 💰 价格: {price}") | |
| 90 | + print(f" 📁 分类: {category}") | |
| 91 | + print(f" 🏷️ 品牌: {brand}") | |
| 92 | + print(f" ⭐ 评分: {hit['_score']:.3f}") | |
| 93 | + print() | |
| 94 | + | |
| 95 | + else: | |
| 96 | + print(f"❌ 搜索失败: {response.status_code}") | |
| 97 | + print(f"错误信息: {response.text}") | |
| 98 | + | |
| 99 | + except Exception as e: | |
| 100 | + print(f"❌ 请求异常: {e}") | |
| 101 | + | |
| 102 | + # Step 2: Search with filters | |
| 103 | + print("\n2️⃣ 带过滤条件的搜索") | |
| 104 | + print("-" * 30) | |
| 105 | + | |
| 106 | + filtered_search = { | |
| 107 | + "query": "芭比娃娃", | |
| 108 | + "size": 5, | |
| 109 | + "filters": { | |
| 110 | + "brandName_keyword": ["美泰"], | |
| 111 | + "price_ranges": ["50-100", "100-200"] | |
| 112 | + } | |
| 113 | + } | |
| 114 | + | |
| 115 | + try: | |
| 116 | + response = requests.post(f"{API_BASE_URL}/search/", json=filtered_search) | |
| 117 | + | |
| 118 | + if response.ok: | |
| 119 | + data = response.json() | |
| 120 | + print(f"✅ 过滤后找到 {data['total']} 个结果,耗时 {data['took_ms']}ms") | |
| 121 | + print(" 🎯 过滤条件: 品牌=美泰, 价格=¥50-200") | |
| 122 | + | |
| 123 | + print(f"\n💫 前3个过滤结果:") | |
| 124 | + for i, hit in enumerate(data['hits'][:3]): | |
| 125 | + source = hit['_source'] | |
| 126 | + price = source.get('price', 'N/A') | |
| 127 | + category = source.get('categoryName', 'N/A') | |
| 128 | + brand = source.get('brandName', 'N/A') | |
| 129 | + print(f" {i+1}. {source.get('name', 'N/A')}") | |
| 130 | + print(f" 💰 ¥{price} | 📁 {category} | 🏷️ {brand}") | |
| 131 | + print(f" ⭐ 评分: {hit['_score']:.3f}") | |
| 132 | + | |
| 133 | + else: | |
| 134 | + print(f"❌ 过滤搜索失败: {response.status_code}") | |
| 135 | + | |
| 136 | + except Exception as e: | |
| 137 | + print(f"❌ 请求异常: {e}") | |
| 138 | + | |
| 139 | + # Step 3: Search with sorting | |
| 140 | + print("\n3️⃣ 排序搜索") | |
| 141 | + print("-" * 30) | |
| 142 | + | |
| 143 | + # Test price ascending | |
| 144 | + price_asc_search = { | |
| 145 | + "query": "芭比娃娃", | |
| 146 | + "size": 3, | |
| 147 | + "sort_by": "price", | |
| 148 | + "sort_order": "asc" | |
| 149 | + } | |
| 150 | + | |
| 151 | + try: | |
| 152 | + response = requests.post(f"{API_BASE_URL}/search/", json=price_asc_search) | |
| 153 | + | |
| 154 | + if response.ok: | |
| 155 | + data = response.json() | |
| 156 | + print(f"✅ 价格升序排序,找到 {data['total']} 个结果") | |
| 157 | + print(" 📈 排序方式: 价格从低到高") | |
| 158 | + | |
| 159 | + print(f"\n💵 价格排序结果:") | |
| 160 | + for i, hit in enumerate(data['hits']): | |
| 161 | + source = hit['_source'] | |
| 162 | + price = source.get('price', 'N/A') | |
| 163 | + name = source.get('name', 'N/A') | |
| 164 | + print(f" {i+1}. ¥{price} - {name}") | |
| 165 | + | |
| 166 | + else: | |
| 167 | + print(f"❌ 排序搜索失败: {response.status_code}") | |
| 168 | + | |
| 169 | + except Exception as e: | |
| 170 | + print(f"❌ 请求异常: {e}") | |
| 171 | + | |
| 172 | + # Step 4: Test time sorting | |
| 173 | + print("\n4️⃣ 时间排序测试") | |
| 174 | + print("-" * 30) | |
| 175 | + | |
| 176 | + time_sort_search = { | |
| 177 | + "query": "芭比娃娃", | |
| 178 | + "size": 3, | |
| 179 | + "sort_by": "create_time", | |
| 180 | + "sort_order": "desc" | |
| 181 | + } | |
| 182 | + | |
| 183 | + try: | |
| 184 | + response = requests.post(f"{API_BASE_URL}/search/", json=time_sort_search) | |
| 185 | + | |
| 186 | + if response.ok: | |
| 187 | + data = response.json() | |
| 188 | + print(f"✅ 时间降序排序,找到 {data['total']} 个结果") | |
| 189 | + print(" 📅 排序方式: 上架时间从新到旧") | |
| 190 | + | |
| 191 | + print(f"\n🕐 时间排序结果:") | |
| 192 | + for i, hit in enumerate(data['hits']): | |
| 193 | + source = hit['_source'] | |
| 194 | + create_time = source.get('create_time', 'N/A') | |
| 195 | + name = source.get('name', 'N/A') | |
| 196 | + print(f" {i+1}. {create_time} - {name}") | |
| 197 | + | |
| 198 | + else: | |
| 199 | + print(f"❌ 时间排序失败: {response.status_code}") | |
| 200 | + | |
| 201 | + except Exception as e: | |
| 202 | + print(f"❌ 请求异常: {e}") | |
| 203 | + | |
| 204 | + print("\n" + "=" * 60) | |
| 205 | + print("🎉 搜索功能测试完成!") | |
| 206 | + print("✨ 前端访问地址: http://localhost:8080") | |
| 207 | + print("🔧 后端API地址: http://120.76.41.98:6002") | |
| 208 | + print("=" * 60) | |
| 209 | + | |
| 210 | +if __name__ == "__main__": | |
| 211 | + test_complete_search_workflow() | |
| 0 | 212 | \ No newline at end of file | ... | ... |
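For readers following the workflow script, this is the response shape it reads from the /search/ endpoint. The values below are illustrative placeholders, not real data; only the keys the script actually touches (total, took_ms, hits[*]._source / _score, aggregations.*.buckets) are shown:

# Illustrative response shape only; values are placeholders.
example_response = {
    "total": 42,
    "took_ms": 18,
    "hits": [
        {
            "_score": 3.217,
            "_source": {"name": "...", "price": 59.0, "categoryName": "...", "brandName": "..."},
        },
    ],
    "aggregations": {
        "category_stats": {"buckets": [{"key": "...", "doc_count": 12}]},
        "price_ranges": {"buckets": [{"key": "0-50", "doc_count": 7}]},
    },
}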
| ... | ... | @@ -0,0 +1,45 @@ |
| 1 | +#!/usr/bin/env python3 | |
| 2 | +""" | |
| 3 | +Minimal test to isolate sort issue | |
| 4 | +""" | |
| 5 | + | |
| 6 | +import requests | |
| 7 | +import json | |
| 8 | + | |
| 9 | +def test_minimal_sort(): | |
| 10 | + """Test minimal sort case""" | |
| 11 | + | |
| 12 | + base_url = "http://120.76.41.98:6002" | |
| 13 | + | |
| 14 | + # Test 1: No sort parameters | |
| 15 | + print("Test 1: No sort parameters") | |
| 16 | + response = requests.post(f"{base_url}/search/", json={"query": "test", "size": 1}) | |
| 17 | + print(f"Status: {response.status_code}") | |
| 18 | + print(f"Response: {response.text[:200]}...") | |
| 19 | + | |
| 20 | + # Test 2: Empty sort_by | |
| 21 | + print("\nTest 2: Empty sort_by") | |
| 22 | + response = requests.post(f"{base_url}/search/", json={"query": "test", "size": 1, "sort_by": ""}) | |
| 23 | + print(f"Status: {response.status_code}") | |
| 24 | + print(f"Response: {response.text[:200]}...") | |
| 25 | + | |
| 26 | + # Test 3: sort_by only (no sort_order) | |
| 27 | + print("\nTest 3: sort_by only") | |
| 28 | + response = requests.post(f"{base_url}/search/", json={"query": "test", "size": 1, "sort_by": "create_time"}) | |
| 29 | + print(f"Status: {response.status_code}") | |
| 30 | + print(f"Response: {response.text[:200]}...") | |
| 31 | + | |
| 32 | + # Test 4: sort_order only (no sort_by) | |
| 33 | + print("\nTest 4: sort_order only") | |
| 34 | + response = requests.post(f"{base_url}/search/", json={"query": "test", "size": 1, "sort_order": "desc"}) | |
| 35 | + print(f"Status: {response.status_code}") | |
| 36 | + print(f"Response: {response.text[:200]}...") | |
| 37 | + | |
| 38 | + # Test 5: Both parameters with None values | |
| 39 | + print("\nTest 5: Both parameters with null values") | |
| 40 | + response = requests.post(f"{base_url}/search/", json={"query": "test", "size": 1, "sort_by": None, "sort_order": None}) | |
| 41 | + print(f"Status: {response.status_code}") | |
| 42 | + print(f"Response: {response.text[:200]}...") | |
| 43 | + | |
| 44 | +if __name__ == "__main__": | |
| 45 | + test_minimal_sort() | |
| 0 | 46 | \ No newline at end of file | ... | ... |
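The minimal test above exercises missing, empty, and null sort parameters; whatever the backend does, it needs to treat all of those as "no explicit sort". A hedged sketch of that normalization is below; the function name and defaults are assumptions, not the project's code.

from typing import Optional, Tuple

def normalize_sort(sort_by: Optional[str], sort_order: Optional[str]) -> Optional[Tuple[str, str]]:
    """Sketch: collapse None/empty/invalid sort parameters into a safe default."""
    if not sort_by:              # covers None and "" (tests 1, 2, 4, 5 above)
        return None              # caller falls back to relevance scoring
    order = (sort_order or "desc").lower()
    if order not in ("asc", "desc"):
        order = "desc"
    return sort_by, order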
| ... | ... | @@ -0,0 +1,256 @@ |
| 1 | +""" | |
| 2 | +Tests for aggregation API functionality. | |
| 3 | +""" | |
| 4 | + | |
| 5 | +import pytest | |
| 6 | +from fastapi.testclient import TestClient | |
| 7 | +from api.app import app | |
| 8 | + | |
| 9 | +client = TestClient(app) | |
| 10 | + | |
| 11 | + | |
| 12 | +@pytest.mark.integration | |
| 13 | +@pytest.mark.api | |
| 14 | +def test_search_with_aggregations(): | |
| 15 | + """Test search with dynamic aggregations.""" | |
| 16 | + request_data = { | |
| 17 | + "query": "芭比娃娃", | |
| 18 | + "size": 10, | |
| 19 | + "aggregations": { | |
| 20 | + "category_name": { | |
| 21 | + "type": "terms", | |
| 22 | + "field": "categoryName_keyword", | |
| 23 | + "size": 10 | |
| 24 | + }, | |
| 25 | + "brand_name": { | |
| 26 | + "type": "terms", | |
| 27 | + "field": "brandName_keyword", | |
| 28 | + "size": 10 | |
| 29 | + }, | |
| 30 | + "price_ranges": { | |
| 31 | + "type": "range", | |
| 32 | + "field": "price", | |
| 33 | + "ranges": [ | |
| 34 | + {"key": "0-50", "to": 50}, | |
| 35 | + {"key": "50-100", "from": 50, "to": 100}, | |
| 36 | + {"key": "100-200", "from": 100, "to": 200}, | |
| 37 | + {"key": "200+", "from": 200} | |
| 38 | + ] | |
| 39 | + } | |
| 40 | + } | |
| 41 | + } | |
| 42 | + | |
| 43 | + response = client.post("/search/", json=request_data) | |
| 44 | + | |
| 45 | + assert response.status_code == 200 | |
| 46 | + data = response.json() | |
| 47 | + | |
| 48 | + # Check basic search response structure | |
| 49 | + assert "hits" in data | |
| 50 | + assert "total" in data | |
| 51 | + assert "aggregations" in data | |
| 52 | + assert "query_info" in data | |
| 53 | + | |
| 54 | + # Check aggregations structure | |
| 55 | + aggregations = data["aggregations"] | |
| 56 | + | |
| 57 | + # Should have category aggregations | |
| 58 | + if "category_name" in aggregations: | |
| 59 | + assert "buckets" in aggregations["category_name"] | |
| 60 | + assert isinstance(aggregations["category_name"]["buckets"], list) | |
| 61 | + | |
| 62 | + # Should have brand aggregations | |
| 63 | + if "brand_name" in aggregations: | |
| 64 | + assert "buckets" in aggregations["brand_name"] | |
| 65 | + assert isinstance(aggregations["brand_name"]["buckets"], list) | |
| 66 | + | |
| 67 | + # Should have price range aggregations | |
| 68 | + if "price_ranges" in aggregations: | |
| 69 | + assert "buckets" in aggregations["price_ranges"] | |
| 70 | + assert isinstance(aggregations["price_ranges"]["buckets"], list) | |
| 71 | + | |
| 72 | + | |
| 73 | +@pytest.mark.integration | |
| 74 | +@pytest.mark.api | |
| 75 | +def test_search_with_sorting(): | |
| 76 | + """Test search with different sorting options.""" | |
| 77 | + | |
| 78 | + # Test price ascending | |
| 79 | + request_data = { | |
| 80 | + "query": "玩具", | |
| 81 | + "size": 5, | |
| 82 | + "sort_by": "price_asc" | |
| 83 | + } | |
| 84 | + | |
| 85 | + response = client.post("/search/", json=request_data) | |
| 86 | + assert response.status_code == 200 | |
| 87 | + data = response.json() | |
| 88 | + | |
| 89 | + if data["hits"] and len(data["hits"]) > 1: | |
| 90 | + # Check if results are sorted by price (ascending) | |
| 91 | + prices = [] | |
| 92 | + for hit in data["hits"]: | |
| 93 | + if "_source" in hit and "price" in hit["_source"]: | |
| 94 | + prices.append(hit["_source"]["price"]) | |
| 95 | + | |
| 96 | + if len(prices) > 1: | |
| 97 | + assert prices == sorted(prices), "Results should be sorted by price ascending" | |
| 98 | + | |
| 99 | + # Test price descending | |
| 100 | + request_data["sort_by"] = "price_desc" | |
| 101 | + response = client.post("/search/", json=request_data) | |
| 102 | + assert response.status_code == 200 | |
| 103 | + data = response.json() | |
| 104 | + | |
| 105 | + if data["hits"] and len(data["hits"]) > 1: | |
| 106 | + prices = [] | |
| 107 | + for hit in data["hits"]: | |
| 108 | + if "_source" in hit and "price" in hit["_source"]: | |
| 109 | + prices.append(hit["_source"]["price"]) | |
| 110 | + | |
| 111 | + if len(prices) > 1: | |
| 112 | + assert prices == sorted(prices, reverse=True), "Results should be sorted by price descending" | |
| 113 | + | |
| 114 | + # Test time descending | |
| 115 | + request_data["sort_by"] = "time_desc" | |
| 116 | + response = client.post("/search/", json=request_data) | |
| 117 | + assert response.status_code == 200 | |
| 118 | + data = response.json() | |
| 119 | + | |
| 120 | + if data["hits"] and len(data["hits"]) > 1: | |
| 121 | + times = [] | |
| 122 | + for hit in data["hits"]: | |
| 123 | + if "_source" in hit and "create_time" in hit["_source"]: | |
| 124 | + times.append(hit["_source"]["create_time"]) | |
| 125 | + | |
| 126 | + if len(times) > 1: | |
| 127 | + # Newer items should come first | |
| 128 | + assert times == sorted(times, reverse=True), "Results should be sorted by time descending" | |
| 129 | + | |
| 130 | + | |
| 131 | +@pytest.mark.integration | |
| 132 | +@pytest.mark.api | |
| 133 | +def test_search_with_filters_and_aggregations(): | |
| 134 | + """Test search with filters and aggregations together.""" | |
| 135 | + request_data = { | |
| 136 | + "query": "玩具", | |
| 137 | + "size": 10, | |
| 138 | + "filters": { | |
| 139 | + "category_name": ["芭比"] | |
| 140 | + }, | |
| 141 | + "aggregations": { | |
| 142 | + "brand_name": { | |
| 143 | + "type": "terms", | |
| 144 | + "field": "brandName_keyword", | |
| 145 | + "size": 10 | |
| 146 | + } | |
| 147 | + } | |
| 148 | + } | |
| 149 | + | |
| 150 | + response = client.post("/search/", json=request_data) | |
| 151 | + assert response.status_code == 200 | |
| 152 | + data = response.json() | |
| 153 | + | |
| 154 | + # Check that results are filtered | |
| 155 | + assert "hits" in data | |
| 156 | + for hit in data["hits"]: | |
| 157 | + if "_source" in hit and "categoryName" in hit["_source"]: | |
| 158 | + assert "芭比" in hit["_source"]["categoryName"] | |
| 159 | + | |
| 160 | + # Check that aggregations are still present | |
| 161 | + assert "aggregations" in data | |
| 162 | + | |
| 163 | + | |
| 164 | +@pytest.mark.integration | |
| 165 | +@pytest.mark.api | |
| 166 | +def test_search_without_aggregations(): | |
| 167 | + """Test search without aggregations (default behavior).""" | |
| 168 | + request_data = { | |
| 169 | + "query": "玩具", | |
| 170 | + "size": 10 | |
| 171 | + } | |
| 172 | + | |
| 173 | + response = client.post("/search/", json=request_data) | |
| 174 | + assert response.status_code == 200 | |
| 175 | + data = response.json() | |
| 176 | + | |
| 177 | + # Should still have basic response structure | |
| 178 | + assert "hits" in data | |
| 179 | + assert "total" in data | |
| 180 | + assert "query_info" in data | |
| 181 | + | |
| 182 | + # Aggregations might be empty or not present without explicit request | |
| 183 | + assert "aggregations" in data | |
| 184 | + | |
| 185 | + | |
| 186 | +@pytest.mark.integration | |
| 187 | +@pytest.mark.api | |
| 188 | +def test_aggregation_edge_cases(): | |
| 189 | + """Test aggregation edge cases.""" | |
| 190 | + | |
| 191 | + # Test with empty query | |
| 192 | + request_data = { | |
| 193 | + "query": "", | |
| 194 | + "size": 10, | |
| 195 | + "aggregations": { | |
| 196 | + "category_name": { | |
| 197 | + "type": "terms", | |
| 198 | + "field": "categoryName_keyword", | |
| 199 | + "size": 10 | |
| 200 | + } | |
| 201 | + } | |
| 202 | + } | |
| 203 | + | |
| 204 | + response = client.post("/search/", json=request_data) | |
| 205 | + # Should handle empty query gracefully | |
| 206 | + assert response.status_code in [200, 422] | |
| 207 | + | |
| 208 | + # Test with invalid aggregation type | |
| 209 | + request_data = { | |
| 210 | + "query": "玩具", | |
| 211 | + "size": 10, | |
| 212 | + "aggregations": { | |
| 213 | + "invalid_agg": { | |
| 214 | + "type": "invalid_type", | |
| 215 | + "field": "categoryName_keyword", | |
| 216 | + "size": 10 | |
| 217 | + } | |
| 218 | + } | |
| 219 | + } | |
| 220 | + | |
| 221 | + response = client.post("/search/", json=request_data) | |
| 222 | + # Should handle invalid aggregation type gracefully | |
| 223 | + assert response.status_code in [200, 422] | |
| 224 | + | |
| 225 | + | |
| 226 | +@pytest.mark.unit | |
| 227 | +def test_aggregation_spec_validation(): | |
| 228 | + """Test aggregation specification validation.""" | |
| 229 | + from api.models import AggregationSpec | |
| 230 | + | |
| 231 | + # Test valid aggregation spec | |
| 232 | + agg_spec = AggregationSpec( | |
| 233 | + field="categoryName_keyword", | |
| 234 | + type="terms", | |
| 235 | + size=10 | |
| 236 | + ) | |
| 237 | + assert agg_spec.field == "categoryName_keyword" | |
| 238 | + assert agg_spec.type == "terms" | |
| 239 | + assert agg_spec.size == 10 | |
| 240 | + | |
| 241 | + # Test range aggregation spec | |
| 242 | + range_agg = AggregationSpec( | |
| 243 | + field="price", | |
| 244 | + type="range", | |
| 245 | + ranges=[ | |
| 246 | + {"key": "0-50", "to": 50}, | |
| 247 | + {"key": "50-100", "from": 50, "to": 100} | |
| 248 | + ] | |
| 249 | + ) | |
| 250 | + assert range_agg.field == "price" | |
| 251 | + assert range_agg.type == "range" | |
| 252 | + assert len(range_agg.ranges) == 2 | |
| 253 | + | |
| 254 | + | |
| 255 | +if __name__ == "__main__": | |
| 256 | + pytest.main([__file__]) | |
| 0 | 257 | \ No newline at end of file | ... | ... |
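The last unit test imports AggregationSpec from api.models, but that model is not part of this diff. The sketch below is reverse-engineered from the fields the assertions touch (field, type, size, ranges); it is an assumed shape for illustration, not the actual model definition.

from typing import Any, Dict, List, Optional
from pydantic import BaseModel

class AggregationSpec(BaseModel):
    """Assumed shape of an aggregation specification, inferred from the test."""
    field: str                                      # ES field to aggregate on
    type: str = "terms"                             # "terms" or "range"
    size: Optional[int] = 10                        # bucket count for terms aggs
    ranges: Optional[List[Dict[str, Any]]] = None   # range definitions for range aggs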