Add new engine: Wordnik.com
parent 547478089f
commit 7035bed4ee

searx/engines/wordnik.py  (new file, 77 lines)
@@ -0,0 +1,77 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Wordnik (general)

"""

from lxml.html import fromstring
from searx import logger
from searx.utils import extract_text
from searx.raise_for_httperror import raise_for_httperror

logger = logger.getChild('Wordnik engine')

# about
about = {
    "website": 'https://www.wordnik.com',
    "wikidata_id": 'Q8034401',
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

categories = ['general']
paging = False

URL = 'https://www.wordnik.com'
SEARCH_URL = URL + '/words/{query}'


def request(query, params):
    params['url'] = SEARCH_URL.format(query=query)
    logger.debug(f"query_url --> {params['url']}")
    return params


def response(resp):
    results = []

    raise_for_httperror(resp)
    dom = fromstring(resp.text)
    word = extract_text(dom.xpath('//*[@id="headword"]/text()'))

    definitions = []
    for src in dom.xpath('//*[@id="define"]//h3[@class="source"]'):
        src_text = extract_text(src).strip()
        if src_text.startswith('from '):
            src_text = src_text[5:]

        src_defs = []
        for def_item in src.xpath('following-sibling::ul[1]/li'):
            def_abbr = extract_text(def_item.xpath('.//abbr')).strip()
            def_text = extract_text(def_item).strip()
            if def_abbr:
                def_text = def_text[len(def_abbr):].strip()
            src_defs.append((def_abbr, def_text))

        definitions.append((src_text, src_defs))

    if not definitions:
        return results

    infobox = ''
    for src_text, src_defs in definitions:
        infobox += f"<small>{src_text}</small>"
        infobox += "<ul>"
        for def_abbr, def_text in src_defs:
            if def_abbr:
                def_abbr += ": "
            infobox += f"<li><i>{def_abbr}</i> {def_text}</li>"
        infobox += "</ul>"

    results.append({
        'infobox': word,
        'content': infobox,
    })

    return results
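
For a quick sense of what response() extracts, the standalone sketch below runs the same XPath walk over a hand-written HTML fragment. It is not part of the commit: the fragment only imitates the structure the engine expects (an element with id="define", h3.source headings each followed by a <ul> of definitions) rather than real Wordnik markup, and lxml's text_content() stands in for searx.utils.extract_text.

# Standalone sketch; the sample HTML is invented for illustration only.
from lxml.html import fromstring

SAMPLE = """
<div id="define">
  <h3 class="source">from The American Heritage Dictionary</h3>
  <ul>
    <li><abbr>n.</abbr> A worker ant.</li>
    <li><abbr>v.</abbr> To work persistently.</li>
  </ul>
</div>
"""

dom = fromstring(SAMPLE)
for src in dom.xpath('//*[@id="define"]//h3[@class="source"]'):
    source = src.text_content().strip()
    if source.startswith('from '):
        source = source[5:]                      # drop the leading "from "
    for item in src.xpath('following-sibling::ul[1]/li'):
        abbr = ' '.join(item.xpath('.//abbr/text()')).strip()
        text = item.text_content().strip()
        if abbr:
            text = text[len(abbr):].strip()      # strip the part-of-speech prefix
        print(f"{source} | {abbr} | {text}")
# Prints:
# The American Heritage Dictionary | n. | A worker ant.
# The American Heritage Dictionary | v. | To work persistently.
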
@@ -1271,6 +1271,14 @@ engines:
    categories: videos
    disabled : True

  - name: wordnik
    engine: wordnik
    shortcut: def
    base_url: https://www.wordnik.com/
    categories: general
    timeout: 5.0
    disabled: True

# Doku engine lets you access to any Doku wiki instance:
# A public one or a privete/corporate one.
#  - name : ubuntuwiki
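
Note that the committed settings entry ships with disabled: True, so the engine is off by default; enabling it means flipping that flag to False in settings.yml. Once enabled, the def shortcut should make it reachable through searx's usual bang syntax (e.g. a query such as !def serendipity), assuming the default shortcut handling.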