commit 219af243a2
Author: Alexandre Flament
@@ -13,6 +13,8 @@ include utils/makefile.include
 PYOBJECTS = searx
 DOC       = docs
 PY_SETUP_EXTRAS ?= [test]
+PYLINT_SEARX_DISABLE_OPTION := I,C,R,W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401
+PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES := supported_languages,language_aliases
 
 include utils/makefile.python
 include utils/makefile.sphinx
@@ -210,8 +212,8 @@ gecko.driver:
 PHONY += test test.sh test.pylint test.pep8 test.unit test.coverage test.robot
 test: buildenv test.pylint test.pep8 test.unit gecko.driver test.robot
 
-# TODO: balance linting with pylint
 
+# TODO: balance linting with pylint
 test.pylint: pyenvinstall
 	$(call cmd,pylint,\
 		searx/preferences.py \
@@ -219,6 +221,16 @@ test.pylint: pyenvinstall
 		searx/engines/gigablast.py \
 		searx/engines/deviantart.py \
 	)
+	$(call cmd,pylint,\
+		--disable=$(PYLINT_SEARX_DISABLE_OPTION) \
+		--additional-builtins=$(PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES) \
+		searx/engines \
+	)
+	$(call cmd,pylint,\
+		--disable=$(PYLINT_SEARX_DISABLE_OPTION) \
+		--ignore=searx/engines \
+		searx tests \
+	)
 
 # ignored rules:
 #  E402 module level import not at top of file
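Note: the target now lints in two passes. A minimal sketch of what the Makefile runs, using pylint's Python entry point (pylint.lint.Run; recent pylint releases accept exit=False to keep the process alive) with the option values defined at the top of the Makefile:

    from pylint import lint

    DISABLE = "I,C,R,W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401"
    ENGINE_BUILTINS = "supported_languages,language_aliases"

    # pass 1: the engines, declaring the names the engine loader injects
    # into every engine module at runtime as additional builtins
    lint.Run([
        "--disable=" + DISABLE,
        "--additional-builtins=" + ENGINE_BUILTINS,
        "searx/engines",
    ], exit=False)

    # pass 2: everything else, with the engines directory ignored
    lint.Run([
        "--disable=" + DISABLE,
        "--ignore=searx/engines",
        "searx", "tests",
    ], exit=False)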
@@ -72,7 +72,7 @@ def load_engine(engine_data):
 
     try:
         engine = load_module(engine_module + '.py', engine_dir)
-    except (SyntaxError, KeyboardInterrupt, SystemExit, SystemError, ImportError, RuntimeError) as e:
+    except (SyntaxError, KeyboardInterrupt, SystemExit, SystemError, ImportError, RuntimeError):
         logger.exception('Fatal exception in engine "{}"'.format(engine_module))
         sys.exit(1)
     except:
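Note: the bound name e was never used, which pylint reports as an unused variable; logger.exception already picks up the active exception and its traceback from the interpreter, so nothing needs to be bound. A minimal demonstration of the pattern:

    import logging
    import sys

    logger = logging.getLogger('engines')

    try:
        raise ImportError('broken engine')
    except (SyntaxError, ImportError, RuntimeError):
        # no "as e" needed: the traceback is taken from the current exception
        logger.exception('Fatal exception in engine "%s"', 'example')
        sys.exit(1)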
@@ -234,7 +234,7 @@ def get_engines_stats(preferences):
     results = to_percentage(results, max_results)
     scores = to_percentage(scores, max_score)
     scores_per_result = to_percentage(scores_per_result, max_score_per_result)
-    erros = to_percentage(errors, max_errors)
+    errors = to_percentage(errors, max_errors)
 
     return [
         (
@@ -41,7 +41,6 @@ def response(resp):
         # defaults
         filesize = 0
         magnet_link = "magnet:?xt=urn:btih:{}&tr=http://tracker.acgsou.com:2710/announce"
-        torrent_link = ""
 
         try:
            category = extract_text(result.xpath(xpath_category)[0])
@@ -61,7 +61,7 @@ def response(resp):
             content = content_string.format(doi_content="", abstract_content=abstract)
 
         if len(content) > 300:
-                    content = content[0:300] + "..."
+            content = content[0:300] + "..."
         # TODO: center snippet on query term
 
         publishedDate = datetime.strptime(entry.xpath('.//published')[0].text, '%Y-%m-%dT%H:%M:%SZ')
@@ -80,10 +80,7 @@ def response(resp):
 
         date = datetime.now()  # needed in case no dcdate is available for an item
         for item in entry:
-            if item.attrib["name"] == "dchdate":
-                harvestDate = item.text
-
-            elif item.attrib["name"] == "dcdate":
+            if item.attrib["name"] == "dcdate":
                 date = item.text
 
             elif item.attrib["name"] == "dctitle":
@@ -18,7 +18,7 @@ from json import loads
 from searx.utils import match_language
 
 from searx.engines.bing import language_aliases
-from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import
 
 # engine dependent config
 categories = ['images']
@@ -17,7 +17,7 @@ from urllib.parse import urlencode, urlparse, parse_qsl
 from lxml import etree
 from searx.utils import list_get, match_language
 from searx.engines.bing import language_aliases
-from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import
 
 # engine dependent config
 categories = ['news']
@@ -16,7 +16,7 @@ from urllib.parse import urlencode
 from searx.utils import match_language
 
 from searx.engines.bing import language_aliases
-from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import
 
 categories = ['videos']
 paging = True
@@ -41,8 +41,6 @@ def request(query, params):
     from_currency = name_to_iso4217(from_currency.strip())
     to_currency = name_to_iso4217(to_currency.strip())
 
-    q = (from_currency + to_currency).upper()
-
     params['url'] = url.format(from_currency, to_currency)
     params['amount'] = amount
     params['from'] = from_currency
@@ -49,11 +49,11 @@ correction_xpath = '//div[@id="did_you_mean"]//a'
 
 
 # match query's language to a region code that duckduckgo will accept
-def get_region_code(lang, lang_list=[]):
+def get_region_code(lang, lang_list=None):
     if lang == 'all':
         return None
 
-    lang_code = match_language(lang, lang_list, language_aliases, 'wt-WT')
+    lang_code = match_language(lang, lang_list or [], language_aliases, 'wt-WT')
     lang_parts = lang_code.split('-')
 
     # country code goes first
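Note: the signature change avoids pylint's "dangerous default value" warning (W0102). A default like lang_list=[] is evaluated once, at definition time, and shared by every call, as this hypothetical minimal example shows:

    def append_to(item, target=[]):      # W0102: one shared list
        target.append(item)
        return target

    print(append_to(1))   # [1]
    print(append_to(2))   # [1, 2]: the "empty" default remembered call 1

    def append_to_fixed(item, target=None):
        target = target or []            # fresh list on every call
        target.append(item)
        return target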
@@ -16,7 +16,7 @@ from lxml import html
 from searx import logger
 from searx.data import WIKIDATA_UNITS
 from searx.engines.duckduckgo import language_aliases
-from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import
 from searx.utils import extract_text, html_to_text, match_language, get_string_replaces_function
 from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
 
@@ -16,7 +16,7 @@
 from json import loads
 from urllib.parse import urlencode
 from searx.engines.duckduckgo import get_region_code
-from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import
 from searx.poolrequests import get
 
 # engine dependent config
@@ -8,7 +8,7 @@ search_url = url + '/search.php?{query}&size_i=0&size_f=100000000&engine_r=1&eng
 paging = True
 
 
-class FilecropResultParser(HTMLParser):
+class FilecropResultParser(HTMLParser):  # pylint: disable=W0223  # (see https://bugs.python.org/issue31844)
 
     def __init__(self):
         HTMLParser.__init__(self)
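Note: W0223 fires here because HTMLParser inherits error() from _markupbase.ParserBase, where it only raises NotImplementedError (bugs.python.org/issue31844), so pylint treats every HTMLParser subclass as an abstract class left unimplemented. The alternative to the inline disable would be a trivial override, sketched here:

    from html.parser import HTMLParser

    class StrictParser(HTMLParser):
        def error(self, message):   # satisfies pylint instead of disabling W0223
            raise SyntaxError(message)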
@@ -29,10 +29,7 @@ from lxml import html
 from flask_babel import gettext
 from searx import logger
 from searx.utils import extract_text, eval_xpath
-from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA
-
-# pylint: disable=unused-import
-# pylint: enable=unused-import
+from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import
 
 from searx.engines.google import (
     get_lang_country,
@@ -13,7 +13,7 @@
 from urllib.parse import urlencode
 from lxml import html
 from searx.utils import match_language
-from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import
 
 # search-url
 categories = ['news']
@@ -90,6 +90,5 @@ def response(resp):
 
 
 def _fetch_supported_languages(resp):
-    ret_val = {}
     peertube_languages = list(loads(resp.text).keys())
     return peertube_languages
@@ -81,7 +81,7 @@ def response(resp):
             pass
 
         if len(content) > 300:
-                    content = content[0:300] + "..."
+            content = content[0:300] + "..."
         # TODO: center snippet on query term
 
         res_dict = {'url': url,
@@ -23,7 +23,7 @@ from searx.data import WIKIDATA_UNITS
 from searx.poolrequests import post, get
 from searx.utils import match_language, searx_useragent, get_string_replaces_function
 from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
-from searx.engines.wikipedia import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.wikipedia import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import
 
 logger = logger.getChild('wikidata')
 
@@ -228,7 +228,7 @@ def get_results(attribute_result, attributes, language):
                 # Should use normalized value p:P2046/psn:P2046/wikibase:quantityAmount
                 area = attribute_result.get('P2046')
                 osm_zoom = area_to_osm_zoom(area) if area else 19
-                url = attribute.get_str(attribute_result, language, osm_zoom=osm_zoom)
+                url = attribute.get_geo_url(attribute_result, osm_zoom=osm_zoom)
                 if url:
                     infobox_urls.append({'title': attribute.get_label(language),
                                          'url': url,
@@ -546,7 +546,14 @@ class WDGeoAttribute(WDAttribute):
     def get_group_by(self):
         return self.get_select()
 
-    def get_str(self, result, language, osm_zoom=19):
+    def get_str(self, result, language):
+        latitude = result.get(self.name + 'Lat')
+        longitude = result.get(self.name + 'Long')
+        if latitude and longitude:
+            return latitude + ' ' + longitude
+        return None
+
+    def get_geo_url(self, result, osm_zoom=19):
         latitude = result.get(self.name + 'Lat')
         longitude = result.get(self.name + 'Long')
         if latitude and longitude:
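Note: the refactor separates the two jobs the old get_str mixed together, rendering coordinates as plain text versus building an OpenStreetMap URL, which likely also restores a get_str signature consistent with the other attribute classes. A hypothetical caller (assuming WDGeoAttribute is constructed from a property id, such as P625, Wikidata's coordinate property):

    geo = WDGeoAttribute('P625')
    text = geo.get_str(sparql_row, language)        # e.g. "48.85 2.35"
    url = geo.get_geo_url(sparql_row, osm_zoom=19)  # map link at zoom 19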
@@ -14,7 +14,7 @@ from datetime import datetime, timedelta
 from urllib.parse import urlencode
 from lxml import html
 from searx.engines.yahoo import parse_url, language_aliases
-from searx.engines.yahoo import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.yahoo import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import
 from dateutil import parser
 from searx.utils import extract_text, extract_url, match_language
 
@@ -158,8 +158,8 @@ def prepare_package_resources(pkg, name):
 
 def sha_sum(filename):
     with open(filename, "rb") as f:
-        bytes = f.read()
-        return sha256(bytes).hexdigest()
+        file_content_bytes = f.read()
+        return sha256(file_content_bytes).hexdigest()
 
 
 plugins = PluginStore()
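Note: the old name shadowed the bytes builtin (pylint W0622, redefined-builtin). Shadowing is harmless here, but it bites as soon as the builtin is needed later in the same scope, as in this contrived sketch:

    def checksum_header(filename):
        with open(filename, "rb") as f:
            bytes = f.read()       # W0622: shadows the builtin type
        return bytes(b"sha256:")   # TypeError: 'bytes' object is not callable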
@@ -92,15 +92,15 @@ class RawTextQuery:
                         or lang == english_name
                         or lang.replace('-', ' ') == country)\
                        and lang not in self.languages:
-                            searx_query_part = True
-                            lang_parts = lang_id.split('-')
-                            if len(lang_parts) == 2:
-                                self.languages.append(lang_parts[0] + '-' + lang_parts[1].upper())
-                            else:
-                                self.languages.append(lang_id)
-                            # to ensure best match (first match is not necessarily the best one)
-                            if lang == lang_id:
-                                break
+                        searx_query_part = True
+                        lang_parts = lang_id.split('-')
+                        if len(lang_parts) == 2:
+                            self.languages.append(lang_parts[0] + '-' + lang_parts[1].upper())
+                        else:
+                            self.languages.append(lang_id)
+                        # to ensure best match (first match is not necessarily the best one)
+                        if lang == lang_id:
+                            break
 
                 # user may set a valid, yet not selectable language
                 if VALID_LANGUAGE_CODE.match(lang):
@@ -298,7 +298,7 @@ class ResultContainer:
         gresults = []
         categoryPositions = {}
 
-        for i, res in enumerate(results):
+        for res in results:
             # FIXME : handle more than one category per engine
             res['category'] = engines[res['engine']].categories[0]
 
@@ -43,9 +43,8 @@ else:
         logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
     else:
         logger.critical('outgoing.max_request_timeout if defined has to be float')
-        from sys import exit
-
-        exit(1)
+        import sys
+        sys.exit(1)
 
 
 class EngineRef:
@@ -52,7 +52,7 @@ class HTMLTextExtractorException(Exception):
     pass
 
 
-class HTMLTextExtractor(HTMLParser):
+class HTMLTextExtractor(HTMLParser):  # pylint: disable=W0223  # (see https://bugs.python.org/issue31844)
 
     def __init__(self):
         HTMLParser.__init__(self)
@@ -74,18 +74,18 @@ class HTMLTextExtractor(HTMLParser):
     def is_valid_tag(self):
         return not self.tags or self.tags[-1] not in blocked_tags
 
-    def handle_data(self, d):
+    def handle_data(self, data):
         if not self.is_valid_tag():
             return
-        self.result.append(d)
+        self.result.append(data)
 
-    def handle_charref(self, number):
+    def handle_charref(self, name):
         if not self.is_valid_tag():
             return
-        if number[0] in ('x', 'X'):
-            codepoint = int(number[1:], 16)
+        if name[0] in ('x', 'X'):
+            codepoint = int(name[1:], 16)
         else:
-            codepoint = int(number)
+            codepoint = int(name)
         self.result.append(chr(codepoint))
 
     def handle_entityref(self, name):
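Note: the parameter renames match the names the standard library documents for html.parser.HTMLParser (handle_data(data), handle_charref(name)), which helps keep pylint from flagging the overrides for mismatched arguments. A runnable check of the stdlib behaviour (charrefs arrive without the surrounding &# and ;):

    from html.parser import HTMLParser

    class Demo(HTMLParser):
        def handle_data(self, data):
            print('data:', data)

        def handle_charref(self, name):
            print('charref:', name)

    # convert_charrefs=False so charrefs reach handle_charref un-decoded
    Demo(convert_charrefs=False).feed('A&#65;&#x41;')
    # prints: data: A / charref: 65 / charref: x41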
@@ -380,7 +380,7 @@ def _get_lang_to_lc_dict(lang_list):
     return value
 
 
-def _match_language(lang_code, lang_list=[], custom_aliases={}):
+def _match_language(lang_code, lang_list=[], custom_aliases={}):  # pylint: disable=W0102
     """auxiliary function to match lang_code in lang_list"""
     # replace language code with a custom alias if necessary
     if lang_code in custom_aliases:
@@ -403,7 +403,7 @@ def _match_language(lang_code, lang_list=[], custom_aliases={}):
     return _get_lang_to_lc_dict(lang_list).get(lang_code, None)
 
 
-def match_language(locale_code, lang_list=[], custom_aliases={}, fallback='en-US'):
+def match_language(locale_code, lang_list=[], custom_aliases={}, fallback='en-US'):  # pylint: disable=W0102
     """get the language code from lang_list that best matches locale_code"""
     # try to get language from given locale_code
     language = _match_language(locale_code, lang_list, custom_aliases)
@@ -466,7 +466,7 @@ def pre_request():
     else:
         try:
             preferences.parse_dict(request.form)
-        except Exception as e:
+        except Exception:
             logger.exception('invalid settings')
             request.errors.append(gettext('Invalid settings'))
 
@@ -819,7 +819,6 @@ def preferences():
 
     # render preferences
     image_proxy = request.preferences.get_value('image_proxy')
-    lang = request.preferences.get_value('language')
     disabled_engines = request.preferences.engines.get_disabled()
     allowed_plugins = request.preferences.plugins.get_enabled()
 
@@ -1,23 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import os
-import unittest2 as unittest
-from plone.testing import layered
-from robotsuite import RobotTestSuite
-from searx.testing import SEARXROBOTLAYER
-
-
-def test_suite():
-    suite = unittest.TestSuite()
-    current_dir = os.path.abspath(os.path.dirname(__file__))
-    robot_dir = os.path.join(current_dir, 'robot')
-    tests = [
-        os.path.join('robot', f) for f in
-        os.listdir(robot_dir) if f.endswith('.robot') and
-        f.startswith('test_')
-    ]
-    for test in tests:
-        suite.addTests([
-            layered(RobotTestSuite(test), layer=SEARXROBOTLAYER),
-        ])
-    return suite
@@ -31,10 +31,10 @@ class PluginStoreTest(SearxTestCase):
         request = Mock()
         store.call([], 'asdf', request, Mock())
 
-        self.assertFalse(testplugin.asdf.called)
+        self.assertFalse(testplugin.asdf.called)  # pylint: disable=E1101
 
         store.call([testplugin], 'asdf', request, Mock())
-        self.assertTrue(testplugin.asdf.called)
+        self.assertTrue(testplugin.asdf.called)  # pylint: disable=E1101
 
 
 class SelfIPTest(SearxTestCase):
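Note: the E1101 (no-member) disables are needed because Mock creates attributes on first access at runtime, which static analysis cannot see:

    from unittest.mock import Mock

    plugin = Mock()
    plugin.asdf('x')            # .asdf springs into existence here
    print(plugin.asdf.called)   # True, but pylint reports E1101 statically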
@@ -5,8 +5,8 @@ from searx.testing import SearxTestCase
 
 class PluginStub:
 
-    def __init__(self, id, default_on):
-        self.id = id
+    def __init__(self, plugin_id, default_on):
+        self.id = plugin_id
         self.default_on = default_on
 
 
@@ -15,11 +15,11 @@ class TestSettings(SearxTestCase):
 
     def test_map_setting_invalid_initialization(self):
         with self.assertRaises(MissingArgumentException):
-            setting = MapSetting(3, wrong_argument={'0': 0})
+            MapSetting(3, wrong_argument={'0': 0})
 
     def test_map_setting_invalid_default_value(self):
         with self.assertRaises(ValidationException):
-            setting = MapSetting(3, map={'dog': 1, 'bat': 2})
+            MapSetting(3, map={'dog': 1, 'bat': 2})
 
     def test_map_setting_invalid_choice(self):
         setting = MapSetting(2, map={'dog': 1, 'bat': 2})
@@ -36,18 +36,14 @@ class TestSettings(SearxTestCase):
         setting.parse('bat')
         self.assertEqual(setting.get_value(), 2)
 
-    def test_enum_setting_invalid_initialization(self):
-        with self.assertRaises(MissingArgumentException):
-            setting = EnumStringSetting('cat', wrong_argument=[0, 1, 2])
-
     # enum settings
     def test_enum_setting_invalid_initialization(self):
         with self.assertRaises(MissingArgumentException):
-            setting = EnumStringSetting('cat', wrong_argument=[0, 1, 2])
+            EnumStringSetting('cat', wrong_argument=[0, 1, 2])
 
     def test_enum_setting_invalid_default_value(self):
         with self.assertRaises(ValidationException):
-            setting = EnumStringSetting(3, choices=[0, 1, 2])
+            EnumStringSetting(3, choices=[0, 1, 2])
 
     def test_enum_setting_invalid_choice(self):
         setting = EnumStringSetting(0, choices=[0, 1, 2])
@@ -67,11 +63,11 @@ class TestSettings(SearxTestCase):
     # multiple choice settings
     def test_multiple_setting_invalid_initialization(self):
         with self.assertRaises(MissingArgumentException):
-            setting = MultipleChoiceSetting(['2'], wrong_argument=['0', '1', '2'])
+            MultipleChoiceSetting(['2'], wrong_argument=['0', '1', '2'])
 
     def test_multiple_setting_invalid_default_value(self):
         with self.assertRaises(ValidationException):
-            setting = MultipleChoiceSetting(['3', '4'], choices=['0', '1', '2'])
+            MultipleChoiceSetting(['3', '4'], choices=['0', '1', '2'])
 
     def test_multiple_setting_invalid_choice(self):
         setting = MultipleChoiceSetting(['1', '2'], choices=['0', '1', '2'])
@@ -145,7 +145,7 @@ class ViewsTestCase(SearxTestCase):
         result = self.app.post('/', data={'q': 'test', 'format': 'rss'})
         self.assertEqual(result.status_code, 308)
 
-    def test_index_rss(self):
+    def test_search_rss(self):
         result = self.app.post('/search', data={'q': 'test', 'format': 'rss'})
 
         self.assertIn(