[pylint] searx/engines/duckduckgo_definitions.py

BTW: normalize indentations

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>

This commit is contained in:
parent 21541f8a12
commit 343570f7fb
searx/engines/duckduckgo_definitions.py
@@ -1,6 +1,8 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""
- DuckDuckGo (Instant Answer API)
+# lint: pylint
+# pylint: disable=missing-function-docstring
+"""DuckDuckGo (Instant Answer API)
+
 """
 
 import json
@@ -48,7 +50,7 @@ def is_broken_text(text):
 
 
 def result_to_text(text, htmlResult):
-    # TODO : remove result ending with "Meaning" or "Category"
+    # TODO : remove result ending with "Meaning" or "Category"  # pylint: disable=fixme
     result = None
     dom = html.fromstring(htmlResult)
     a = dom.xpath('//a')
@@ -63,13 +65,18 @@ def result_to_text(text, htmlResult):
 
 def request(query, params):
     params['url'] = URL.format(query=urlencode({'q': query}))
-    language = match_language(params['language'], supported_languages, language_aliases)
+    language = match_language(
+        params['language'],
+        supported_languages,   # pylint: disable=undefined-variable
+        language_aliases
+    )
     language = language.split('-')[0]
     params['headers']['Accept-Language'] = language
     return params
 
 
 def response(resp):
+    # pylint: disable=too-many-locals, too-many-branches, too-many-statements
     results = []
 
     search_res = json.loads(resp.text)
@@ -124,17 +131,23 @@ def response(resp):
             firstURL = ddg_result.get('FirstURL')
             text = ddg_result.get('Text')
             if not is_broken_text(text):
-                suggestion = result_to_text(text,
-                                            ddg_result.get('Result'))
+                suggestion = result_to_text(
+                    text,
+                    ddg_result.get('Result')
+                )
                 if suggestion != heading and suggestion is not None:
                     results.append({'suggestion': suggestion})
         elif 'Topics' in ddg_result:
             suggestions = []
-            relatedTopics.append({'name': ddg_result.get('Name', ''),
-                                  'suggestions': suggestions})
+            relatedTopics.append({
+                'name': ddg_result.get('Name', ''),
+                'suggestions': suggestions
+            })
             for topic_result in ddg_result.get('Topics', []):
-                suggestion = result_to_text(topic_result.get('Text'),
-                                            topic_result.get('Result'))
+                suggestion = result_to_text(
+                    topic_result.get('Text'),
+                    topic_result.get('Result')
+                )
                 if suggestion != heading and suggestion is not None:
                     suggestions.append(suggestion)
 
@@ -143,19 +156,25 @@ def response(resp):
     if abstractURL != '':
         # add as result ? problem always in english
         infobox_id = abstractURL
-        urls.append({'title': search_res.get('AbstractSource'),
+        urls.append({
+            'title': search_res.get('AbstractSource'),
             'url': abstractURL,
-                     'official': True})
-        results.append({'url': abstractURL,
-                        'title': heading})
+            'official': True
+        })
+        results.append({
+            'url': abstractURL,
+            'title': heading
+        })
 
     # definition
     definitionURL = search_res.get('DefinitionURL', '')
     if definitionURL != '':
         # add as result ? as answer ? problem always in english
         infobox_id = definitionURL
-        urls.append({'title': search_res.get('DefinitionSource'),
-                     'url': definitionURL})
+        urls.append({
+            'title': search_res.get('DefinitionSource'),
+            'url': definitionURL
+        })
 
     # to merge with wikidata's infobox
     if infobox_id:
@@ -183,8 +202,10 @@ def response(resp):
                 # * netflix_id
                 external_url = get_external_url(data_type, data_value)
                 if external_url is not None:
-                    urls.append({'title': data_label,
-                                 'url': external_url})
+                    urls.append({
+                        'title': data_label,
+                        'url': external_url
+                    })
                 elif data_type in ['instance', 'wiki_maps_trigger', 'google_play_artist_id']:
                     # ignore instance: Wikidata value from "Instance Of" (Qxxxx)
                     # ignore wiki_maps_trigger: reference to a javascript
@@ -194,9 +215,11 @@ def response(resp):
                     # There is already an URL for the website
                     pass
                 elif data_type == 'area':
-                    attributes.append({'label': data_label,
+                    attributes.append({
+                        'label': data_label,
                         'value': area_to_str(data_value),
-                                       'entity': 'P2046'})
+                        'entity': 'P2046'
+                    })
                     osm_zoom = area_to_osm_zoom(data_value.get('amount'))
                 elif data_type == 'coordinates':
                     if data_value.get('globe') == 'http://www.wikidata.org/entity/Q2':
@@ -205,12 +228,16 @@ def response(resp):
                         coordinates = info
                     else:
                         # coordinate NOT on Earth
-                        attributes.append({'label': data_label,
+                        attributes.append({
+                            'label': data_label,
                             'value': data_value,
-                                           'entity': 'P625'})
+                            'entity': 'P625'
+                        })
                 elif data_type == 'string':
-                    attributes.append({'label': data_label,
-                                       'value': data_value})
+                    attributes.append({
+                        'label': data_label,
+                        'value': data_value
+                    })
 
             if coordinates:
                 data_label = coordinates.get('label')
@@ -218,25 +245,31 @@ def response(resp):
                 latitude = data_value.get('latitude')
                 longitude = data_value.get('longitude')
                 url = get_earth_coordinates_url(latitude, longitude, osm_zoom)
-                urls.append({'title': 'OpenStreetMap',
+                urls.append({
+                    'title': 'OpenStreetMap',
                     'url': url,
-                             'entity': 'P625'})
+                    'entity': 'P625'
+                })
 
     if len(heading) > 0:
-        # TODO get infobox.meta.value where .label='article_title'
+        # TODO get infobox.meta.value where .label='article_title'    # pylint: disable=fixme
         if image is None and len(attributes) == 0 and len(urls) == 1 and\
            len(relatedTopics) == 0 and len(content) == 0:
-            results.append({'url': urls[0]['url'],
+            results.append({
+                'url': urls[0]['url'],
                 'title': heading,
-                            'content': content})
+                'content': content
+            })
         else:
-            results.append({'infobox': heading,
+            results.append({
+                'infobox': heading,
                 'id': infobox_id,
                 'content': content,
                 'img_src': image,
                 'attributes': attributes,
                 'urls': urls,
-                            'relatedTopics': relatedTopics})
+                'relatedTopics': relatedTopics
+            })
 
     return results
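The pragmas introduced by this patch follow pylint's standard message control: a "# pylint: disable=..." comment at module level suppresses the named check from that point to the end of the file, a pragma on a line of its own inside a function suppresses it for the rest of that block, and a pragma appended to a code line suppresses it for that line only. A minimal sketch of the three scopes, using a hypothetical module that is not part of this commit:

# pylint: disable=missing-function-docstring
# (module-level pragma: applies from here to the end of the file)


def response(resp):
    # pylint: disable=too-many-locals
    # (block-level pragma: applies to the rest of this function body)
    results = []
    # TODO: handle an empty payload  # pylint: disable=fixme
    # (trailing pragma: silences the fixme warning for this single line only)
    results.append(resp)
    return results

The "# lint: pylint" line added at the top of the file is not a pylint pragma; it reads as the project-level marker that searx/SearXNG tooling of that era used to select which modules were included in the pylint run.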