[pylint] searx/network/client.py & add global (TRANSPORT_KWARGS)
No functional change!

- fix messages from pylint
- add ``global TRANSPORT_KWARGS``
- normalized python_socks imports

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>

parent 8033518899
commit b595c482d0
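The changes listed above are mechanical: the python_socks exceptions are imported by their public top-level names instead of through the private python_socks._errors module, and functions that use the module-level TRANSPORT_KWARGS declare it with ``global``. A minimal standalone sketch of that pattern, assuming hypothetical configure_transport() and arequest_checked() helpers rather than the actual searx/network/client.py code:

import httpcore
from python_socks import (
    parse_proxy_url,
    ProxyConnectionError,
    ProxyTimeoutError,
    ProxyError,
)

TRANSPORT_KWARGS = {}  # stand-in for the module-level transport defaults


def configure_transport(proxy_url=None, **kwargs):
    # Hypothetical helper, shown only to illustrate the explicit ``global``
    # declaration the commit adds for module-level state.
    global TRANSPORT_KWARGS
    TRANSPORT_KWARGS = {**TRANSPORT_KWARGS, **kwargs}
    if proxy_url:
        # parse_proxy_url() splits e.g. 'socks5://user:pass@host:1080' into
        # (proxy_type, host, port, username, password).
        proxy_type, host, port, username, password = parse_proxy_url(proxy_url)
        TRANSPORT_KWARGS.update(
            proxy_type=proxy_type, proxy_host=host, proxy_port=port,
            username=username, password=password,
        )


async def arequest_checked(transport, *args, **kwargs):
    # With the top-level names there is no protected access to
    # python_socks._errors, which is what pylint flagged.
    try:
        return await transport.arequest(*args, **kwargs)
    except (ProxyConnectionError, ProxyTimeoutError, ProxyError) as e:
        raise httpcore.ProxyError(e)

The diff below applies the same idea inside AsyncProxyTransportFixed.arequest() and the get_transport*() helpers.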
searx/network/client.py

@@ -1,14 +1,19 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+# lint: pylint
+# pylint: disable=missing-module-docstring, missing-function-docstring, global-statement

 import asyncio
 import logging
 import threading
-
 import httpcore
 import httpx
 from httpx_socks import AsyncProxyTransport
-from python_socks import parse_proxy_url
-import python_socks._errors
+from python_socks import (
+    parse_proxy_url,
+    ProxyConnectionError,
+    ProxyTimeoutError,
+    ProxyError
+)

 from searx import logger

@@ -30,7 +35,11 @@ TRANSPORT_KWARGS = {
 }


-async def close_connections_for_url(connection_pool: httpcore.AsyncConnectionPool, url: httpcore._utils.URL):
+# pylint: disable=protected-access
+async def close_connections_for_url(
+        connection_pool: httpcore.AsyncConnectionPool,
+        url: httpcore._utils.URL ):
+
     origin = httpcore._utils.url_to_origin(url)
     logger.debug('Drop connections for %r', origin)
     connections_to_close = connection_pool._connections_for_origin(origin)
@@ -40,6 +49,7 @@ async def close_connections_for_url(connection_pool: httpcore.AsyncConnectionPool
             await connection.aclose()
         except httpcore.NetworkError as e:
             logger.warning('Error closing an existing connection', exc_info=e)
+# pylint: enable=protected-access


 def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
@@ -80,9 +90,7 @@ class AsyncProxyTransportFixed(AsyncProxyTransport):
             retry -= 1
             try:
                 return await super().arequest(method, url, headers, stream, ext)
-            except (python_socks._errors.ProxyConnectionError,
-                    python_socks._errors.ProxyTimeoutError,
-                    python_socks._errors.ProxyError) as e:
+            except (ProxyConnectionError, ProxyTimeoutError, ProxyError) as e:
                 raise httpcore.ProxyError(e)
             except OSError as e:
                 # socket.gaierror when DNS resolution fails
@@ -114,7 +122,7 @@ class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
             except httpcore.CloseError as e:
                 # httpcore.CloseError: [Errno 104] Connection reset by peer
                 # raised by _keepalive_sweep()
-                #   from https://github.com/encode/httpcore/blob/4b662b5c42378a61e54d673b4c949420102379f5/httpcore/_backends/asyncio.py#L198  # noqa
+                #   from https://github.com/encode/httpcore/blob/4b662b5c42378a61e54d673b4c949420102379f5/httpcore/_backends/asyncio.py#L198  # pylint: disable=line-too-long
                 await close_connections_for_url(self._pool, url)
                 logger.warning('httpcore.CloseError: retry', exc_info=e)
                 # retry
@@ -129,6 +137,7 @@ class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):


 def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
+    global TRANSPORT_KWARGS
     # support socks5h (requests compatibility):
     # https://requests.readthedocs.io/en/master/user/advanced/#socks
     # socks5://   hostname is resolved on client side
@@ -156,14 +165,18 @@ def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit


 def get_transport(verify, http2, local_address, proxy_url, limit, retries):
+    global TRANSPORT_KWARGS
     verify = get_sslcontexts(None, None, True, False, http2) if verify is True else verify
-    return AsyncHTTPTransportFixed(verify=verify,
-                                   http2=http2,
-                                   local_address=local_address,
-                                   proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
-                                   limits=limit,
-                                   retries=retries,
-                                   **TRANSPORT_KWARGS)
+    return AsyncHTTPTransportFixed(
+        # pylint: disable=protected-access
+        verify=verify,
+        http2=http2,
+        local_address=local_address,
+        proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
+        limits=limit,
+        retries=retries,
+        **TRANSPORT_KWARGS
+    )


 def iter_proxies(proxies):
@@ -175,9 +188,11 @@ def iter_proxies(proxies):
             yield pattern, proxy_url


-def new_client(enable_http, verify, enable_http2,
-               max_connections, max_keepalive_connections, keepalive_expiry,
-               proxies, local_address, retries, max_redirects):
+def new_client(
+        # pylint: disable=too-many-arguments
+        enable_http, verify, enable_http2,
+        max_connections, max_keepalive_connections, keepalive_expiry,
+        proxies, local_address, retries, max_redirects  ):
     limit = httpx.Limits(max_connections=max_connections,
                          max_keepalive_connections=max_keepalive_connections,
                          keepalive_expiry=keepalive_expiry)
@@ -217,12 +232,12 @@ def init():
         LOOP = asyncio.new_event_loop()
         LOOP.run_forever()

-    th = threading.Thread(
+    thread = threading.Thread(
         target=loop_thread,
         name='asyncio_loop',
         daemon=True,
     )
-    th.start()
+    thread.start()


 init()
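The final hunk only renames ``th`` to ``thread``; the underlying pattern, an asyncio event loop owned by a daemon thread, is unchanged. A self-contained sketch of that pattern, assuming a hypothetical _LOOP_READY event and _probe() coroutine added purely for the demo (not taken from the diff):

import asyncio
import threading

LOOP = None
_LOOP_READY = threading.Event()  # hypothetical, demo-only synchronisation


def init():
    def loop_thread():
        global LOOP
        LOOP = asyncio.new_event_loop()
        _LOOP_READY.set()
        LOOP.run_forever()

    thread = threading.Thread(
        target=loop_thread,
        name='asyncio_loop',
        daemon=True,
    )
    thread.start()


async def _probe():
    return 'ok'


if __name__ == '__main__':
    init()
    _LOOP_READY.wait()
    # Blocking callers hand coroutines to the loop thread like this:
    future = asyncio.run_coroutine_threadsafe(_probe(), LOOP)
    print(future.result(timeout=5))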