From ac30b6c29561b0a3bbe8e6a48ae17eb9c236eef0 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Wed, 25 Apr 2018 11:07:17 +0300 Subject: [PATCH 01/14] Work on --- aiohttp/client.py | 13 +++++++++---- aiohttp/client_reqrep.py | 21 +++++++++++++++++++++ 2 files changed, 30 insertions(+), 4 deletions(-) diff --git a/aiohttp/client.py b/aiohttp/client.py index 2e5d4ee38cd..b3e446e55aa 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -52,8 +52,8 @@ class ClientSession: '_source_traceback', '_connector', 'requote_redirect_url', '_loop', '_cookie_jar', '_connector_owner', '_default_auth', - '_version', '_json_serialize', '_read_timeout', - '_conn_timeout', '_raise_for_status', '_auto_decompress', + '_version', '_json_serialize', + '_timeout', '_raise_for_status', '_auto_decompress', '_trust_env', '_default_headers', '_skip_auto_headers', '_request_class', '_response_class', '_ws_response_class', '_trace_configs']) @@ -71,6 +71,7 @@ def __init__(self, *, connector=None, loop=None, cookies=None, version=http.HttpVersion11, cookie_jar=None, connector_owner=True, raise_for_status=False, read_timeout=sentinel, conn_timeout=None, + timeout=sentinel, auto_decompress=True, trust_env=False, trace_configs=None): @@ -117,9 +118,13 @@ def __init__(self, *, connector=None, loop=None, cookies=None, self._default_auth = auth self._version = version self._json_serialize = json_serialize - self._read_timeout = (read_timeout if read_timeout is not sentinel - else DEFAULT_TIMEOUT) self._conn_timeout = conn_timeout + if timeout is not sentinel: + self._timeout = timeout + if read_timeout is sentinel: + read_timeout = DEFAULT_TIMEOUT + self._timeout = RequestTimeouts(read_timeout=read_timeout, + connect_timeout=conn_timeout) self._raise_for_status = raise_for_status self._auto_decompress = auto_decompress self._trust_env = trust_env diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index b0a1f5e342f..d182e997778 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -39,6 +39,27 @@ __all__ = ('ClientRequest', 'ClientResponse', 'RequestInfo', 'Fingerprint') +@attr.s(frozen=True, slots=True) +class RequestTimeouts: + read_timeout = attr.ib(type=float, default=None) + connect_timeout = attr.ib(type=float, default=None) + + total_timeout = attr.ib(type=float, default=None) + pool_queue_timeout = attr.ib(type=float, default=None) + dns_resolution_timeout = attr.ib(type=float, default=None) + socket_connect_timeout = attr.ib(type=float, default=None) + connection_acquiring_timeout = attr.ib(type=float, default=None) + new_connection_timeout = attr.ib(type=float, default=None) + http_header_timeout = attr.ib(type=float, default=None) + response_body_timeout = attr.ib(type=float, default=None) + + # to create a timeout specific for a single request, either + # - create a completely new one to overwrite the default + # - or use http://www.attrs.org/en/stable/api.html#attr.evolve + # to overwrite the defaults + + + json_re = re.compile('^application/(?:[\w.+-]+?\+)?json') From 5c07bdf07b66b74746eba9fed0ada781c1f10e96 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Wed, 2 May 2018 17:50:48 +0300 Subject: [PATCH 02/14] Implement basic timeout class --- aiohttp/client.py | 43 +++++++++++++++++++++++++++++----------- aiohttp/client_reqrep.py | 28 +++++++++++++------------- 2 files changed, 45 insertions(+), 26 deletions(-) diff --git a/aiohttp/client.py b/aiohttp/client.py index b3e446e55aa..795db8a611d 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -10,6 +10,7 @@ import 
warnings from collections.abc import Coroutine +import attr from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr from yarl import URL @@ -21,7 +22,8 @@ ServerTimeoutError, TooManyRedirects, WSServerHandshakeError) from .client_reqrep import * # noqa -from .client_reqrep import ClientRequest, ClientResponse, _merge_ssl_params +from .client_reqrep import (ClientRequest, ClientResponse, ClientTimeout, + _merge_ssl_params) from .client_ws import ClientWebSocketResponse from .connector import * # noqa from .connector import TCPConnector @@ -41,8 +43,8 @@ ('ClientSession', 'ClientWebSocketResponse', 'request')) -# 5 Minute default read and connect timeout -DEFAULT_TIMEOUT = 5 * 60 +# 5 Minute default read timeout +DEFAULT_TIMEOUT = ClientTimeout(total=5*60) class ClientSession: @@ -118,13 +120,26 @@ def __init__(self, *, connector=None, loop=None, cookies=None, self._default_auth = auth self._version = version self._json_serialize = json_serialize - self._conn_timeout = conn_timeout if timeout is not sentinel: self._timeout = timeout - if read_timeout is sentinel: - read_timeout = DEFAULT_TIMEOUT - self._timeout = RequestTimeouts(read_timeout=read_timeout, - connect_timeout=conn_timeout) + else: + self._timeout = DEFAULT_TIMEOUT + if read_timeout is not sentinel: + if timeout is not sentinel: + raise ValueError("read_timeout and timeout parameters " + "conflict, please setup " + "timeout.read") + else: + self._timeout = attr.evolve(self._timeout, + total=read_timeout) + if conn_timeout is not None: + if timeout is not sentinel: + raise ValueError("conn_timeout and timeout parameters " + "conflict, please setup " + "timeout.connect") + else: + self._timeout = attr.evolve(self._timeout, + connect=conn_timeout) self._raise_for_status = raise_for_status self._auto_decompress = auto_decompress self._trust_env = trust_env @@ -249,11 +264,14 @@ async def _request(self, method, url, *, except ValueError: raise InvalidURL(proxy) + if timeout is sentinel: + timeout = self._timeout + else: + if not isinstance(timeout, ClientTimeout): + timeout = ClientTimeout(total=timeout) # timeout is cumulative for all request operations # (request, redirects, responses, data consuming) - tm = TimeoutHandle( - self._loop, - timeout if timeout is not sentinel else self._read_timeout) + tm = TimeoutHandle(self._loop, timeout.total) handle = tm.start() traces = [ @@ -319,7 +337,8 @@ async def _request(self, method, url, *, # connection timeout try: - with CeilTimeout(self._conn_timeout, loop=self._loop): + with CeilTimeout(self._timeout.connect, + loop=self._loop): conn = await self._connector.connect( req, traces=traces diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e943defc596..dff64f43677 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -36,22 +36,23 @@ import chardet -__all__ = ('ClientRequest', 'ClientResponse', 'RequestInfo', 'Fingerprint') +__all__ = ('ClientRequest', 'ClientResponse', 'RequestInfo', 'Fingerprint', + 'ClientTimeout') @attr.s(frozen=True, slots=True) -class RequestTimeouts: - read_timeout = attr.ib(type=float, default=None) - connect_timeout = attr.ib(type=float, default=None) - - total_timeout = attr.ib(type=float, default=None) - pool_queue_timeout = attr.ib(type=float, default=None) - dns_resolution_timeout = attr.ib(type=float, default=None) - socket_connect_timeout = attr.ib(type=float, default=None) - connection_acquiring_timeout = attr.ib(type=float, default=None) - new_connection_timeout = attr.ib(type=float, default=None) - 
http_header_timeout = attr.ib(type=float, default=None) - response_body_timeout = attr.ib(type=float, default=None) +class ClientTimeout: + total = attr.ib(type=float, default=None) + sock_read = attr.ib(type=float, default=None) + connect = attr.ib(type=float, default=None) + + # pool_queue_timeout = attr.ib(type=float, default=None) + # dns_resolution_timeout = attr.ib(type=float, default=None) + # socket_connect_timeout = attr.ib(type=float, default=None) + # connection_acquiring_timeout = attr.ib(type=float, default=None) + # new_connection_timeout = attr.ib(type=float, default=None) + # http_header_timeout = attr.ib(type=float, default=None) + # response_body_timeout = attr.ib(type=float, default=None) # to create a timeout specific for a single request, either # - create a completely new one to overwrite the default @@ -59,7 +60,6 @@ class RequestTimeouts: # to overwrite the defaults - json_re = re.compile('^application/(?:[\w.+-]+?\+)?json') From 7e8787ce4cb6836a2cd6d3d8a6c7141f65c7061a Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Tue, 8 May 2018 20:11:30 +0300 Subject: [PATCH 03/14] Add ResponseHandler.set_read_timeout() --- aiohttp/client.py | 3 +- aiohttp/client_proto.py | 3 ++ aiohttp/client_reqrep.py | 4 ++- aiohttp/connector.py | 3 ++ tests/test_connector.py | 61 ++++++++++++++++++++++++++-------------- tests/test_proxy.py | 7 +++-- 6 files changed, 56 insertions(+), 25 deletions(-) diff --git a/aiohttp/client.py b/aiohttp/client.py index a7ce7886f14..023a887c833 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -333,7 +333,8 @@ async def _request(self, method, url, *, response_class=self._response_class, proxy=proxy, proxy_auth=proxy_auth, timer=timer, session=self, auto_decompress=self._auto_decompress, - ssl=ssl, proxy_headers=proxy_headers, traces=traces) + ssl=ssl, proxy_headers=proxy_headers, traces=traces, + timeout=timeout) # connection timeout try: diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index f9fa560195b..a5cc8d41cff 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -131,6 +131,9 @@ def set_response_params(self, *, timer=None, data, self._tail = self._tail, b'' self.data_received(data) + def set_read_timeout(self, timeout): + pass + def data_received(self, data): if not data: return diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index ecb2b5254af..ca88dec5c65 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -204,7 +204,8 @@ def __init__(self, method, url, *, timer=None, session=None, auto_decompress=True, ssl=None, proxy_headers=None, - traces=None): + traces=None, + timeout=None): if loop is None: loop = asyncio.get_event_loop() @@ -248,6 +249,7 @@ def __init__(self, method, url, *, if traces is None: traces = [] self._traces = traces + self._timeout = timeout def is_ssl(self): return self.url.scheme in ('https', 'wss') diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 634c01a1caa..2f35021b95b 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -467,6 +467,7 @@ async def connect(self, req, traces=None): self._acquired.add(proto) self._acquired_per_host[key].add(proto) + proto.set_read_timeout(req._timeout.sock_read) return Connection(self, key, proto, self._loop) def _get(self, key): @@ -538,6 +539,8 @@ def _release(self, key, protocol, *, should_close=False): # acquired connection is already released on connector closing return + protocol.set_read_timeout(None) + self._release_acquired(key, protocol) if self._force_close: diff --git 
a/tests/test_connector.py b/tests/test_connector.py index 1ae32c8f24c..e0c3ff2fb2c 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -15,7 +15,7 @@ from yarl import URL import aiohttp -from aiohttp import client, web +from aiohttp import ClientTimeout, client, web from aiohttp.client import ClientRequest from aiohttp.connector import Connection, _DNSCacheTable from aiohttp.test_utils import make_mocked_coro, unused_port @@ -477,7 +477,8 @@ def test__drop_acquire_per_host3(loop): async def test_tcp_connector_certificate_error(loop): - req = ClientRequest('GET', URL('https://127.0.0.1:443'), loop=loop) + req = ClientRequest('GET', URL('https://127.0.0.1:443'), loop=loop, + timeout=ClientTimeout(sock_read=None)) async def certificate_error(*args, **kwargs): raise ssl.CertificateError @@ -510,7 +511,8 @@ async def test_tcp_connector_multiple_hosts_errors(loop): req = ClientRequest('GET', URL('https://mocked.host'), ssl=aiohttp.Fingerprint(fingerprint), - loop=loop) + loop=loop, + timeout=ClientTimeout(sock_read=None)) async def _resolve_host(host, port, traces=None): return [{ @@ -549,7 +551,7 @@ async def create_connection(*args, **kwargs): if ip == ip4: fingerprint_error = True - tr, pr = mock.Mock(), None + tr, pr = mock.Mock(), mock.Mock() def get_extra_info(param): if param == 'sslcontext': @@ -570,7 +572,7 @@ def get_extra_info(param): if ip == ip5: connected = True - tr, pr = mock.Mock(), None + tr, pr = mock.Mock(), mock.Mock() def get_extra_info(param): if param == 'sslcontext': @@ -931,6 +933,7 @@ def test_dns_error(loop): req = ClientRequest( 'GET', URL('http://www.python.org'), loop=loop, + timeout=ClientTimeout(sock_read=None) ) with pytest.raises(aiohttp.ClientConnectorError): loop.run_until_complete(connector.connect(req)) @@ -1001,7 +1004,8 @@ async def test_connect(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop) + req = ClientRequest('GET', URL('http://host:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop) key = ('host', 80, False) @@ -1045,7 +1049,8 @@ async def test_connect_tracing(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop) + req = ClientRequest('GET', URL('http://host:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop) conn._create_connection = mock.Mock() @@ -1072,7 +1077,8 @@ async def test_close_during_connect(loop): proto.is_connected.return_value = True fut = loop.create_future() - req = ClientRequest('GET', URL('http://host:80'), loop=loop) + req = ClientRequest('GET', URL('http://host:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop) conn._create_connection = mock.Mock() @@ -1376,7 +1382,8 @@ async def test_connect_with_limit(loop, key): req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, - response_class=mock.Mock()) + response_class=mock.Mock(), + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._conns[key] = [(proto, loop.time())] @@ -1442,7 +1449,8 @@ async def test_connect_queued_operation_tracing(loop, key): req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, - response_class=mock.Mock()) + response_class=mock.Mock(), + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._conns[key] = [(proto, loop.time())] @@ -1501,7 +1509,8 
@@ async def test_connect_reuseconn_tracing(loop, key): req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, - response_class=mock.Mock()) + response_class=mock.Mock(), + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._conns[key] = [(proto, loop.time())] @@ -1520,7 +1529,8 @@ async def test_connect_with_limit_and_limit_per_host(loop, key): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop) + req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop, limit=1000, limit_per_host=1) conn._conns[key] = [(proto, loop.time())] @@ -1554,7 +1564,8 @@ async def test_connect_with_no_limit_and_limit_per_host(loop, key): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop) + req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop, limit=0, limit_per_host=1) conn._conns[key] = [(proto, loop.time())] @@ -1586,7 +1597,8 @@ async def test_connect_with_no_limits(loop, key): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop) + req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop, limit=0, limit_per_host=0) conn._conns[key] = [(proto, loop.time())] @@ -1619,7 +1631,8 @@ async def test_connect_with_limit_cancelled(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop) + req = ClientRequest('GET', URL('http://host:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop, limit=1) key = ('host', 80, False) @@ -1665,7 +1678,8 @@ async def test_connect_with_limit_concurrent(loop): proto.should_close = False proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop) + req = ClientRequest('GET', URL('http://host:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) max_connections = 2 num_connections = 0 @@ -1725,7 +1739,8 @@ async def test_connect_waiters_cleanup(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop) + req = ClientRequest('GET', URL('http://host:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._available_connections = mock.Mock(return_value=0) @@ -1744,7 +1759,8 @@ async def test_connect_waiters_cleanup_key_error(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop) + req = ClientRequest('GET', URL('http://host:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._available_connections = mock.Mock(return_value=0) @@ -1767,7 +1783,8 @@ async def test_close_with_acquired_connection(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop) + req = ClientRequest('GET', URL('http://host:80'), loop=loop, + timeout=ClientTimeout(sock_read=None)) conn = aiohttp.BaseConnector(loop=loop, limit=1) key = ('host', 80, False) @@ -1969,7 +1986,7 @@ def 
test_unix_connector_not_found(loop): req = ClientRequest( 'GET', URL('http://www.python.org'), loop=loop, - ) + timeout=ClientTimeout(sock_read=None)) with pytest.raises(aiohttp.ClientConnectorError): loop.run_until_complete(connector.connect(req)) @@ -1984,6 +2001,7 @@ def test_unix_connector_permission(loop): req = ClientRequest( 'GET', URL('http://www.python.org'), loop=loop, + timeout=ClientTimeout(sock_read=None) ) with pytest.raises(aiohttp.ClientConnectorError): loop.run_until_complete(connector.connect(req)) @@ -2001,7 +2019,8 @@ async def test_resolver_not_called_with_address_is_ip(loop): req = ClientRequest('GET', URL('http://127.0.0.1:{}'.format(unused_port())), loop=loop, - response_class=mock.Mock()) + response_class=mock.Mock(), + timeout=ClientTimeout(sock_read=None)) with pytest.raises(OSError): await connector.connect(req) diff --git a/tests/test_proxy.py b/tests/test_proxy.py index ccb8e9393eb..6c1652eb687 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -8,6 +8,7 @@ from yarl import URL import aiohttp +from aiohttp import ClientTimeout from aiohttp.client_reqrep import ClientRequest, ClientResponse from aiohttp.helpers import TimerNoop from aiohttp.test_utils import make_mocked_coro @@ -39,7 +40,8 @@ def test_connect(self, ClientRequestMock): req = ClientRequest( 'GET', URL('http://www.python.org'), proxy=URL('http://proxy.example.com'), - loop=self.loop + loop=self.loop, + timeout=ClientTimeout(sock_read=None) ) self.assertEqual(str(req.proxy), 'http://proxy.example.com') @@ -70,7 +72,8 @@ def test_proxy_headers(self, ClientRequestMock): 'GET', URL('http://www.python.org'), proxy=URL('http://proxy.example.com'), proxy_headers={'Foo': 'Bar'}, - loop=self.loop) + loop=self.loop, + timeout=ClientTimeout(sock_read=None)) self.assertEqual(str(req.proxy), 'http://proxy.example.com') # mock all the things! 
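The comment left inside the new timeout class above points at ``attr.evolve`` as the way to derive a per-request timeout from the session default. A minimal, standalone sketch of that pattern (the class shape matches PATCH 02; the concrete numbers are illustrative only, not aiohttp defaults)::

    import attr


    @attr.s(frozen=True, slots=True)
    class ClientTimeout:
        # same fields as the class introduced in PATCH 02
        total = attr.ib(type=float, default=None)
        sock_read = attr.ib(type=float, default=None)
        connect = attr.ib(type=float, default=None)


    # session-wide default: a 5 minute total budget, as in DEFAULT_TIMEOUT
    default = ClientTimeout(total=5 * 60)

    # per-request override: keep the default total, tighten only sock_read
    per_request = attr.evolve(default, sock_read=0.5)

    assert per_request.total == 300
    assert per_request.sock_read == 0.5
    assert default.sock_read is None   # the frozen default is left untouched

Because the class is frozen, ``attr.evolve`` returns a new instance instead of mutating the shared default, which is why a single ``ClientTimeout`` can be reused safely across a whole session.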
From b71caa272d30b1a57dab767b29054d64b491287a Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Wed, 9 May 2018 20:50:59 +0300 Subject: [PATCH 04/14] Refactor --- aiohttp/client.py | 15 +++++--- aiohttp/client_proto.py | 42 +++++++++++++++++++--- aiohttp/client_reqrep.py | 21 +++-------- aiohttp/connector.py | 6 ++-- tests/test_client_proto.py | 6 ++-- tests/test_client_request.py | 1 - tests/test_client_response.py | 55 ----------------------------- tests/test_connector.py | 65 ++++++++++++----------------------- tests/test_proxy.py | 13 +------ 9 files changed, 82 insertions(+), 142 deletions(-) diff --git a/aiohttp/client.py b/aiohttp/client.py index 023a887c833..55bd29683d6 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -332,9 +332,8 @@ async def _request(self, method, url, *, expect100=expect100, loop=self._loop, response_class=self._response_class, proxy=proxy, proxy_auth=proxy_auth, timer=timer, - session=self, auto_decompress=self._auto_decompress, - ssl=ssl, proxy_headers=proxy_headers, traces=traces, - timeout=timeout) + session=self, + ssl=ssl, proxy_headers=proxy_headers, traces=traces) # connection timeout try: @@ -351,11 +350,19 @@ async def _request(self, method, url, *, tcp_nodelay(conn.transport, True) tcp_cork(conn.transport, False) + + conn.protocol.set_response_params( + timer=timer, + skip_payload=method.upper() == 'HEAD', + read_until_eof=read_until_eof, + auto_decompress=self._auto_decompress, + read_timeout=timeout.sock_read) + try: try: resp = await req.send(conn) try: - await resp.start(conn, read_until_eof) + await resp.start(conn) except BaseException: resp.close() raise diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index a5cc8d41cff..35fab3fc841 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -1,3 +1,4 @@ +import asyncio from contextlib import suppress from .base_protocol import BaseProtocol @@ -28,6 +29,9 @@ def __init__(self, *, loop=None): self._upgraded = False self._parser = None + self._read_timeout = None + self._read_timeout_handle = None + @property def upgraded(self): return self._upgraded @@ -55,6 +59,8 @@ def is_connected(self): return self.transport is not None def connection_lost(self, exc): + self._drop_timeout() + if self._payload_parser is not None: with suppress(Exception): self._payload_parser.feed_eof() @@ -86,7 +92,7 @@ def connection_lost(self, exc): super().connection_lost(exc) def eof_received(self): - pass + self._drop_timeout() def pause_reading(self): if not self._reading_paused: @@ -95,6 +101,7 @@ def pause_reading(self): except (AttributeError, NotImplementedError, RuntimeError): pass self._reading_paused = True + self._drop_timeout() def resume_reading(self): if self._reading_paused: @@ -103,15 +110,19 @@ def resume_reading(self): except (AttributeError, NotImplementedError, RuntimeError): pass self._reading_paused = False + self._reschedule_timeout() def set_exception(self, exc): self._should_close = True + self._drop_timeout() super().set_exception(exc) def set_parser(self, parser, payload): self._payload = payload self._payload_parser = parser + self._drop_timeout() + if self._tail: data, self._tail = self._tail, b'' self.data_received(data) @@ -119,8 +130,13 @@ def set_parser(self, parser, payload): def set_response_params(self, *, timer=None, skip_payload=False, read_until_eof=False, - auto_decompress=True): + auto_decompress=True, + read_timeout=None): self._skip_payload = skip_payload + + self._read_timeout = read_timeout + self._reschedule_timeout() + self._parser = 
HttpResponseParser( self, self._loop, timer=timer, payload_exception=ClientPayloadError, @@ -131,8 +147,25 @@ def set_response_params(self, *, timer=None, data, self._tail = self._tail, b'' self.data_received(data) - def set_read_timeout(self, timeout): - pass + def _drop_timeout(self): + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + self._read_timeout_handle = None + + def _reschedule_timeout(self): + timeout = self._read_timeout + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + + if timeout: + self._read_timeout_handle = self._loop.call_later( + timeout, self._on_read_timeout) + else: + self._read_timeout_handle = None + + def _on_read_timeout(self): + self.set_exception( + asyncio.TimeoutError("Timeout on reading data from socket")) def data_received(self, data): if not data: @@ -160,6 +193,7 @@ def data_received(self, data): self.transport.close() # should_close is True after the call self.set_exception(exc) + self._drop_timeout() return self._upgraded = upgraded diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index ca88dec5c65..742c3ccfd65 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -201,11 +201,10 @@ def __init__(self, method, url, *, chunked=None, expect100=False, loop=None, response_class=None, proxy=None, proxy_auth=None, - timer=None, session=None, auto_decompress=True, + timer=None, session=None, ssl=None, proxy_headers=None, - traces=None, - timeout=None): + traces=None): if loop is None: loop = asyncio.get_event_loop() @@ -227,7 +226,6 @@ def __init__(self, method, url, *, self.length = None self.response_class = response_class or ClientResponse self._timer = timer if timer is not None else TimerNoop() - self._auto_decompress = auto_decompress self._ssl = ssl if loop.get_debug(): @@ -249,7 +247,6 @@ def __init__(self, method, url, *, if traces is None: traces = [] self._traces = traces - self._timeout = timeout def is_ssl(self): return self.url.scheme in ('https', 'wss') @@ -558,7 +555,6 @@ async def send(self, conn): self.method, self.original_url, writer=self._writer, continue100=self._continue, timer=self._timer, request_info=self.request_info, - auto_decompress=self._auto_decompress, traces=self._traces, loop=self.loop, session=self._session @@ -604,7 +600,7 @@ class ClientResponse(HeadersMixin): def __init__(self, method, url, *, writer, continue100, timer, - request_info, auto_decompress, + request_info, traces, loop, session): assert isinstance(url, URL) @@ -621,7 +617,6 @@ def __init__(self, method, url, *, self._history = () self._request_info = request_info self._timer = timer if timer is not None else TimerNoop() - self._auto_decompress = auto_decompress # True by default self._cache = {} # required for @reify method decorator self._traces = traces self._loop = loop @@ -742,23 +737,17 @@ def links(self): return MultiDictProxy(links) - async def start(self, connection, read_until_eof=False): + async def start(self, connection): """Start response processing.""" self._closed = False self._protocol = connection.protocol self._connection = connection - connection.protocol.set_response_params( - timer=self._timer, - skip_payload=self.method.lower() == 'head', - read_until_eof=read_until_eof, - auto_decompress=self._auto_decompress) - with self._timer: while True: # read response try: - (message, payload) = await self._protocol.read() + message, payload = await self._protocol.read() except http.HttpProcessingError as exc: raise ClientResponseError( 
self.request_info, self.history, diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 2f35021b95b..8860866fdeb 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -467,7 +467,6 @@ async def connect(self, req, traces=None): self._acquired.add(proto) self._acquired_per_host[key].add(proto) - proto.set_read_timeout(req._timeout.sock_read) return Connection(self, key, proto, self._loop) def _get(self, key): @@ -539,8 +538,6 @@ def _release(self, key, protocol, *, should_close=False): # acquired connection is already released on connector closing return - protocol.set_read_timeout(None) - self._release_acquired(key, protocol) if self._force_close: @@ -925,7 +922,8 @@ async def _create_proxy_connection(self, req, traces=None): conn = Connection(self, key, proto, self._loop) proxy_resp = await proxy_req.send(conn) try: - resp = await proxy_resp.start(conn, True) + conn._protocol.set_response_params() + resp = await proxy_resp.start(conn) except BaseException: proxy_resp.close() conn.close() diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index 9868b560ce8..af3a417eecc 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -33,7 +33,7 @@ async def test_client_proto_bad_message(loop): proto = ResponseHandler(loop=loop) transport = mock.Mock() proto.connection_made(transport) - proto.set_response_params(read_until_eof=True) + proto.set_response_params() proto.data_received(b'HTTP\r\n\r\n') assert proto.should_close @@ -71,11 +71,11 @@ async def test_client_protocol_readuntil_eof(loop): continue100=None, timer=TimerNoop(), request_info=mock.Mock(), - auto_decompress=True, traces=[], loop=loop, session=mock.Mock()) - await response.start(conn, read_until_eof=True) + proto.set_response_params() + await response.start(conn) assert not response.content.is_eof() diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 22374f03fdf..3155f4a437a 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -1210,7 +1210,6 @@ async def send(self, conn): continue100=self._continue, timer=self._timer, request_info=self.request_info, - auto_decompress=self._auto_decompress, traces=self._traces, loop=self.loop, session=self._session) diff --git a/tests/test_client_response.py b/tests/test_client_response.py index 8eefa4b9fce..9dbf4d05036 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -29,7 +29,6 @@ async def test_http_processing_error(session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -54,7 +53,6 @@ def test_del(session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -79,7 +77,6 @@ def test_close(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -97,7 +94,6 @@ def test_wait_for_100_1(loop, session): request_info=mock.Mock(), writer=mock.Mock(), timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -112,7 +108,6 @@ def test_wait_for_100_2(loop, session): continue100=None, writer=mock.Mock(), timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -126,7 +121,6 @@ def test_repr(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -142,7 +136,6 @@ def test_repr_non_ascii_url(): 
writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -156,7 +149,6 @@ def test_repr_non_ascii_reason(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -171,7 +163,6 @@ def test_url_obj_deprecated(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -185,7 +176,6 @@ async def test_read_and_release_connection(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -208,7 +198,6 @@ async def test_read_and_release_connection_with_error(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -227,7 +216,6 @@ async def test_release(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -252,7 +240,6 @@ def run(conn): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -270,7 +257,6 @@ async def test_response_eof(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -289,7 +275,6 @@ async def test_response_eof_upgraded(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -308,7 +293,6 @@ async def test_response_eof_after_connection_detach(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -327,7 +311,6 @@ async def test_text(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -353,7 +336,6 @@ async def test_text_bad_encoding(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -382,7 +364,6 @@ async def test_text_custom_encoding(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -410,7 +391,6 @@ async def test_text_detect_encoding(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -436,7 +416,6 @@ async def test_text_detect_encoding_if_invalid_charset(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -463,7 +442,6 @@ async def test_text_after_read(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -489,7 +467,6 @@ async def test_json(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -515,7 +492,6 @@ async def test_json_extended_content_type(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -542,7 +518,6 @@ async def test_json_custom_content_type(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -568,7 
+543,6 @@ async def test_json_custom_loader(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -589,7 +563,6 @@ async def test_json_invalid_content_type(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -609,7 +582,6 @@ async def test_json_no_content(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -627,7 +599,6 @@ async def test_json_override_encoding(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -655,7 +626,6 @@ def test_get_encoding_unknown(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -672,7 +642,6 @@ def test_raise_for_status_2xx(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -687,7 +656,6 @@ def test_raise_for_status_4xx(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -705,7 +673,6 @@ def test_resp_host(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -718,7 +685,6 @@ def test_content_type(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -733,7 +699,6 @@ def test_content_type_no_header(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -748,7 +713,6 @@ def test_charset(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -763,7 +727,6 @@ def test_charset_no_header(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -778,7 +741,6 @@ def test_charset_no_charset(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -793,7 +755,6 @@ def test_content_disposition_full(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -813,7 +774,6 @@ def test_content_disposition_no_parameters(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -830,7 +790,6 @@ def test_content_disposition_no_header(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -845,7 +804,6 @@ def test_content_disposition_cache(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock()) @@ -868,7 +826,6 @@ def test_response_request_info(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock() @@ -892,7 +849,6 @@ def test_request_info_in_exception(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock() @@ -918,7 +874,6 @@ def 
test_no_redirect_history_in_exception(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock() @@ -948,7 +903,6 @@ def test_redirect_history_in_exception(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock() @@ -967,7 +921,6 @@ def test_redirect_history_in_exception(): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=mock.Mock(), session=mock.Mock() @@ -994,7 +947,6 @@ async def test_response_read_triggers_callback(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, loop=loop, session=session, traces=[trace] @@ -1028,7 +980,6 @@ def test_response_real_url(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -1043,7 +994,6 @@ def test_response_links_comma_separated(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -1073,7 +1023,6 @@ def test_response_links_multiple_headers(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -1106,7 +1055,6 @@ def test_response_links_no_rel(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -1132,7 +1080,6 @@ def test_response_links_quoted(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -1158,7 +1105,6 @@ def test_response_links_relative(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) @@ -1184,7 +1130,6 @@ def test_response_links_empty(loop, session): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=loop, session=session) diff --git a/tests/test_connector.py b/tests/test_connector.py index e0c3ff2fb2c..8cc2722b6b0 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -15,7 +15,7 @@ from yarl import URL import aiohttp -from aiohttp import ClientTimeout, client, web +from aiohttp import client, web from aiohttp.client import ClientRequest from aiohttp.connector import Connection, _DNSCacheTable from aiohttp.test_utils import make_mocked_coro, unused_port @@ -477,8 +477,7 @@ def test__drop_acquire_per_host3(loop): async def test_tcp_connector_certificate_error(loop): - req = ClientRequest('GET', URL('https://127.0.0.1:443'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('https://127.0.0.1:443'), loop=loop) async def certificate_error(*args, **kwargs): raise ssl.CertificateError @@ -511,8 +510,7 @@ async def test_tcp_connector_multiple_hosts_errors(loop): req = ClientRequest('GET', URL('https://mocked.host'), ssl=aiohttp.Fingerprint(fingerprint), - loop=loop, - timeout=ClientTimeout(sock_read=None)) + loop=loop) async def _resolve_host(host, port, traces=None): return [{ @@ -932,9 +930,8 @@ def test_dns_error(loop): req = ClientRequest( 'GET', URL('http://www.python.org'), - loop=loop, - timeout=ClientTimeout(sock_read=None) - ) + loop=loop) + with pytest.raises(aiohttp.ClientConnectorError): loop.run_until_complete(connector.connect(req)) @@ -1004,8 +1001,7 @@ async def test_connect(loop): proto = 
mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop) key = ('host', 80, False) @@ -1049,8 +1045,7 @@ async def test_connect_tracing(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop) conn._create_connection = mock.Mock() @@ -1077,8 +1072,7 @@ async def test_close_during_connect(loop): proto.is_connected.return_value = True fut = loop.create_future() - req = ClientRequest('GET', URL('http://host:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop) conn._create_connection = mock.Mock() @@ -1382,8 +1376,7 @@ async def test_connect_with_limit(loop, key): req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, - response_class=mock.Mock(), - timeout=ClientTimeout(sock_read=None)) + response_class=mock.Mock()) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._conns[key] = [(proto, loop.time())] @@ -1449,8 +1442,7 @@ async def test_connect_queued_operation_tracing(loop, key): req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, - response_class=mock.Mock(), - timeout=ClientTimeout(sock_read=None)) + response_class=mock.Mock()) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._conns[key] = [(proto, loop.time())] @@ -1509,8 +1501,7 @@ async def test_connect_reuseconn_tracing(loop, key): req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, - response_class=mock.Mock(), - timeout=ClientTimeout(sock_read=None)) + response_class=mock.Mock()) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._conns[key] = [(proto, loop.time())] @@ -1529,8 +1520,7 @@ async def test_connect_with_limit_and_limit_per_host(loop, key): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=1000, limit_per_host=1) conn._conns[key] = [(proto, loop.time())] @@ -1564,8 +1554,7 @@ async def test_connect_with_no_limit_and_limit_per_host(loop, key): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=0, limit_per_host=1) conn._conns[key] = [(proto, loop.time())] @@ -1597,8 +1586,7 @@ async def test_connect_with_no_limits(loop, key): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=0, limit_per_host=0) conn._conns[key] = [(proto, loop.time())] @@ -1631,8 +1619,7 @@ async def test_connect_with_limit_cancelled(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) 
+ req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=1) key = ('host', 80, False) @@ -1678,8 +1665,7 @@ async def test_connect_with_limit_concurrent(loop): proto.should_close = False proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('http://host:80'), loop=loop) max_connections = 2 num_connections = 0 @@ -1739,8 +1725,7 @@ async def test_connect_waiters_cleanup(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._available_connections = mock.Mock(return_value=0) @@ -1759,8 +1744,7 @@ async def test_connect_waiters_cleanup_key_error(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._available_connections = mock.Mock(return_value=0) @@ -1783,8 +1767,7 @@ async def test_close_with_acquired_connection(loop): proto = mock.Mock() proto.is_connected.return_value = True - req = ClientRequest('GET', URL('http://host:80'), loop=loop, - timeout=ClientTimeout(sock_read=None)) + req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=1) key = ('host', 80, False) @@ -1985,8 +1968,7 @@ def test_unix_connector_not_found(loop): req = ClientRequest( 'GET', URL('http://www.python.org'), - loop=loop, - timeout=ClientTimeout(sock_read=None)) + loop=loop) with pytest.raises(aiohttp.ClientConnectorError): loop.run_until_complete(connector.connect(req)) @@ -2000,9 +1982,7 @@ def test_unix_connector_permission(loop): req = ClientRequest( 'GET', URL('http://www.python.org'), - loop=loop, - timeout=ClientTimeout(sock_read=None) - ) + loop=loop) with pytest.raises(aiohttp.ClientConnectorError): loop.run_until_complete(connector.connect(req)) @@ -2019,8 +1999,7 @@ async def test_resolver_not_called_with_address_is_ip(loop): req = ClientRequest('GET', URL('http://127.0.0.1:{}'.format(unused_port())), loop=loop, - response_class=mock.Mock(), - timeout=ClientTimeout(sock_read=None)) + response_class=mock.Mock()) with pytest.raises(OSError): await connector.connect(req) diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 6c1652eb687..9fea4ce350b 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -8,7 +8,6 @@ from yarl import URL import aiohttp -from aiohttp import ClientTimeout from aiohttp.client_reqrep import ClientRequest, ClientResponse from aiohttp.helpers import TimerNoop from aiohttp.test_utils import make_mocked_coro @@ -41,7 +40,6 @@ def test_connect(self, ClientRequestMock): 'GET', URL('http://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop, - timeout=ClientTimeout(sock_read=None) ) self.assertEqual(str(req.proxy), 'http://proxy.example.com') @@ -72,8 +70,7 @@ def test_proxy_headers(self, ClientRequestMock): 'GET', URL('http://www.python.org'), proxy=URL('http://proxy.example.com'), proxy_headers={'Foo': 'Bar'}, - loop=self.loop, - timeout=ClientTimeout(sock_read=None)) + loop=self.loop) self.assertEqual(str(req.proxy), 'http://proxy.example.com') # mock all the things! 
@@ -154,7 +151,6 @@ def test_https_connect(self, ClientRequestMock): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=self.loop, session=mock.Mock()) @@ -197,7 +193,6 @@ def test_https_connect_certificate_error(self, ClientRequestMock): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=self.loop, session=mock.Mock()) @@ -246,7 +241,6 @@ def test_https_connect_ssl_error(self, ClientRequestMock): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=self.loop, session=mock.Mock()) @@ -295,7 +289,6 @@ def test_https_connect_runtime_error(self, ClientRequestMock): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=self.loop, session=mock.Mock()) @@ -335,7 +328,6 @@ def test_https_connect_http_proxy_error(self, ClientRequestMock): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=self.loop, session=mock.Mock()) @@ -376,7 +368,6 @@ def test_https_connect_resp_start_error(self, ClientRequestMock): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=self.loop, session=mock.Mock()) @@ -450,7 +441,6 @@ def test_https_connect_pass_ssl_context(self, ClientRequestMock): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=self.loop, session=mock.Mock()) @@ -501,7 +491,6 @@ def test_https_auth(self, ClientRequestMock): writer=mock.Mock(), continue100=None, timer=TimerNoop(), - auto_decompress=True, traces=[], loop=self.loop, session=mock.Mock()) From 1520b1bc3f62a5770f291f514316608ff405f271 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Wed, 9 May 2018 22:46:45 +0300 Subject: [PATCH 05/14] Unregister read timeout handler on EOF --- aiohttp/client_proto.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 35fab3fc841..c7875b8119b 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -193,11 +193,11 @@ def data_received(self, data): self.transport.close() # should_close is True after the call self.set_exception(exc) - self._drop_timeout() return self._upgraded = upgraded + payload = None for message, payload in messages: if message.should_close: self._should_close = True @@ -209,6 +209,15 @@ def data_received(self, data): self.feed_data((message, EMPTY_PAYLOAD), 0) else: self.feed_data((message, payload), 0) + if payload is not None: + # new message(s) was processed + # register timeout handler unsubscribing + # either on end-of-stream or immediatelly for + # EMPTY_PAYLOAD + if payload is not EMPTY_PAYLOAD: + payload.on_eof(self._drop_timeout) + else: + self._drop_timeout() if tail: if upgraded: From fd4dd4edc1b6fcd3a76162e3f301cd9f84755279 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Wed, 9 May 2018 22:51:37 +0300 Subject: [PATCH 06/14] Drop unused variable --- aiohttp/client_proto.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index c7875b8119b..54245767f0c 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -17,7 +17,6 @@ def __init__(self, *, loop=None): self._should_close = False - self._message = None self._payload = None self._skip_payload = False self._payload_parser = None @@ -84,7 +83,6 @@ def connection_lost(self, exc): self._should_close = True self._parser = None - self._message = None 
self._payload = None self._payload_parser = None self._reading_paused = False @@ -202,7 +200,6 @@ def data_received(self, data): if message.should_close: self._should_close = True - self._message = message self._payload = payload if self._skip_payload or message.code in (204, 304): From cc3b3d3a5ce52c8db4d3658c77debcee614eccf3 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Wed, 9 May 2018 23:16:49 +0300 Subject: [PATCH 07/14] Fix flacky test --- tests/test_client_proto.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index af3a417eecc..361f766f918 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -74,7 +74,7 @@ async def test_client_protocol_readuntil_eof(loop): traces=[], loop=loop, session=mock.Mock()) - proto.set_response_params() + proto.set_response_params(read_until_eof=True) await response.start(conn) assert not response.content.is_eof() From 69df741666b5c1c476f2c9d3d2a5a9eedc3f5e60 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Thu, 10 May 2018 08:27:26 +0300 Subject: [PATCH 08/14] More tests --- aiohttp/client_proto.py | 1 + tests/test_client_proto.py | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 54245767f0c..4d9d9064729 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -90,6 +90,7 @@ def connection_lost(self, exc): super().connection_lost(exc) def eof_received(self): + # should call parser.feed_eof() most likely self._drop_timeout() def pause_reading(self): diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index 361f766f918..3d346f5db24 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -96,3 +96,36 @@ async def test_empty_data(loop): proto.data_received(b'') # do nothing + + +async def test_schedule_timeout(loop): + proto = ResponseHandler(loop=loop) + proto.set_response_params(read_timeout=1) + assert proto._read_timeout_handle is not None + + +async def test_drop_timeout(loop): + proto = ResponseHandler(loop=loop) + proto.set_response_params(read_timeout=1) + assert proto._read_timeout_handle is not None + proto._drop_timeout() + assert proto._read_timeout_handle is None + + +async def test_reschedule_timeout(loop): + proto = ResponseHandler(loop=loop) + proto.set_response_params(read_timeout=1) + assert proto._read_timeout_handle is not None + h = proto._read_timeout_handle + proto._reschedule_timeout() + assert proto._read_timeout_handle is not None + assert proto._read_timeout_handle is not h + + + +async def test_eof_received(loop): + proto = ResponseHandler(loop=loop) + proto.set_response_params(read_timeout=1) + assert proto._read_timeout_handle is not None + proto.eof_received() + assert proto._read_timeout_handle is None From 4ec1e7b505c3022312881cad0ed0ebeba36a2b6a Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Thu, 10 May 2018 08:55:20 +0300 Subject: [PATCH 09/14] Fix flake8 error --- tests/test_client_proto.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index 3d346f5db24..64c3b2cd7ec 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -122,7 +122,6 @@ async def test_reschedule_timeout(loop): assert proto._read_timeout_handle is not h - async def test_eof_received(loop): proto = ResponseHandler(loop=loop) proto.set_response_params(read_timeout=1) From 1447c8cec6f3e6b23a3987270c33366d41191d48 Mon Sep 17 00:00:00 
2001 From: Andrew Svetlov Date: Thu, 10 May 2018 10:20:16 +0300 Subject: [PATCH 10/14] Add functional test for read timeout --- aiohttp/client_proto.py | 6 +++--- tests/test_client_functional.py | 15 +++++++++++++++ 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 4d9d9064729..a65fffd5571 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -1,9 +1,9 @@ -import asyncio from contextlib import suppress from .base_protocol import BaseProtocol from .client_exceptions import (ClientOSError, ClientPayloadError, - ServerDisconnectedError) + ServerDisconnectedError, + ServerTimeoutError) from .http import HttpResponseParser from .streams import EMPTY_PAYLOAD, DataQueue @@ -164,7 +164,7 @@ def _reschedule_timeout(self): def _on_read_timeout(self): self.set_exception( - asyncio.TimeoutError("Timeout on reading data from socket")) + ServerTimeoutError("Timeout on reading data from socket")) def data_received(self, data): if not data: diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 7fc8af3c430..620741511b8 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -2598,3 +2598,18 @@ async def handler(request): with pytest.raises(aiohttp.ClientConnectionError): await resp.content.readline() + + +async def test_read_timeout(aiohttp_client): + async def handler(request): + await asyncio.sleep(5) + return web.Response() + + app = web.Application() + app.add_routes([web.get('/', handler)]) + + timeout = aiohttp.ClientTimeout(sock_read=0.1) + client = await aiohttp_client(app, timeout=timeout) + + with pytest.raises(aiohttp.ServerTimeoutError): + await client.get('/') From 7208d9f585570d4d6da797bc5948437f1e154371 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Thu, 10 May 2018 10:37:36 +0300 Subject: [PATCH 11/14] Cleanup --- aiohttp/client.py | 26 +++++++++++++++++++++++--- aiohttp/client_proto.py | 3 +-- aiohttp/client_reqrep.py | 23 +---------------------- 3 files changed, 25 insertions(+), 27 deletions(-) diff --git a/aiohttp/client.py b/aiohttp/client.py index 55bd29683d6..cefdddbdfdd 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -22,8 +22,7 @@ ServerTimeoutError, TooManyRedirects, WSServerHandshakeError) from .client_reqrep import * # noqa -from .client_reqrep import (ClientRequest, ClientResponse, ClientTimeout, - _merge_ssl_params) +from .client_reqrep import ClientRequest, ClientResponse, _merge_ssl_params from .client_ws import ClientWebSocketResponse from .connector import * # noqa from .connector import TCPConnector @@ -40,7 +39,28 @@ __all__ = (client_exceptions.__all__ + # noqa client_reqrep.__all__ + # noqa connector_mod.__all__ + # noqa - ('ClientSession', 'ClientWebSocketResponse', 'request')) + ('ClientSession', 'ClientTimeout', + 'ClientWebSocketResponse', 'request')) + + +@attr.s(frozen=True, slots=True) +class ClientTimeout: + total = attr.ib(type=float, default=None) + sock_read = attr.ib(type=float, default=None) + connect = attr.ib(type=float, default=None) + + # pool_queue_timeout = attr.ib(type=float, default=None) + # dns_resolution_timeout = attr.ib(type=float, default=None) + # socket_connect_timeout = attr.ib(type=float, default=None) + # connection_acquiring_timeout = attr.ib(type=float, default=None) + # new_connection_timeout = attr.ib(type=float, default=None) + # http_header_timeout = attr.ib(type=float, default=None) + # response_body_timeout = attr.ib(type=float, default=None) + + # to create a timeout 
specific for a single request, either + # - create a completely new one to overwrite the default + # - or use http://www.attrs.org/en/stable/api.html#attr.evolve + # to overwrite the defaults # 5 Minute default read timeout
diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index a65fffd5571..91fa45a2d3a 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -2,8 +2,7 @@ from .base_protocol import BaseProtocol from .client_exceptions import (ClientOSError, ClientPayloadError, - ServerDisconnectedError, - ServerTimeoutError) + ServerDisconnectedError, ServerTimeoutError) from .http import HttpResponseParser from .streams import EMPTY_PAYLOAD, DataQueue
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 742c3ccfd65..cd6d598de05 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -36,28 +36,7 @@ import chardet -__all__ = ('ClientRequest', 'ClientResponse', 'RequestInfo', 'Fingerprint', - 'ClientTimeout') - - -@attr.s(frozen=True, slots=True) -class ClientTimeout: - total = attr.ib(type=float, default=None) - sock_read = attr.ib(type=float, default=None) - connect = attr.ib(type=float, default=None) - - # pool_queue_timeout = attr.ib(type=float, default=None) - # dns_resolution_timeout = attr.ib(type=float, default=None) - # socket_connect_timeout = attr.ib(type=float, default=None) - # connection_acquiring_timeout = attr.ib(type=float, default=None) - # new_connection_timeout = attr.ib(type=float, default=None) - # http_header_timeout = attr.ib(type=float, default=None) - # response_body_timeout = attr.ib(type=float, default=None) - - # to create a timeout specific for a single request, either - # - create a completely new one to overwrite the default - # - or use http://www.attrs.org/en/stable/api.html#attr.evolve - # to overwrite the defaults +__all__ = ('ClientRequest', 'ClientResponse', 'RequestInfo', 'Fingerprint') json_re = re.compile('^application/(?:[\w.+-]+?\+)?json')
From aa4f40dfcb91b4fc73abe167355435c4aa2ad760 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Thu, 10 May 2018 12:21:48 +0300 Subject: [PATCH 12/14] Add docs --- aiohttp/client.py | 2 +- docs/client_quickstart.rst | 40 +++++++++++++++++++++------ docs/client_reference.rst | 56 ++++++++++++++++++++++++++++++++++---- 3 files changed, 84 insertions(+), 14 deletions(-)
diff --git a/aiohttp/client.py b/aiohttp/client.py index cefdddbdfdd..46affcf6acb 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -46,8 +46,8 @@ @attr.s(frozen=True, slots=True) class ClientTimeout: total = attr.ib(type=float, default=None) - sock_read = attr.ib(type=float, default=None) connect = attr.ib(type=float, default=None) + sock_read = attr.ib(type=float, default=None) # pool_queue_timeout = attr.ib(type=float, default=None) # dns_resolution_timeout = attr.ib(type=float, default=None)
diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst index d2619b5be3d..514fbc432df 100644 --- a/docs/client_quickstart.rst +++ b/docs/client_quickstart.rst @@ -394,16 +394,40 @@ multiple writer tasks which can only send data asynchronously (by Timeouts ======== -By default all IO operations have 5min timeout. The timeout may be -overridden by passing ``timeout`` parameter into -:meth:`ClientSession.get` and family:: + +Timeout settings are stored in the :class:`ClientTimeout` data structure. - async with session.get('https://github.com', timeout=60) as r: +By default *aiohttp* uses a *total* 5min timeout, which means that the +whole operation should finish in 5 minutes.
+ +The value can be overridden by the *timeout* parameter of the session:: + + timeout = aiohttp.ClientTimeout(total=60) + async with aiohttp.ClientSession(timeout=timeout) as session: ... -``None`` or ``0`` disables timeout check. +The timeout can also be overridden for a single request such as :meth:`ClientSession.get`:: -.. note:: + async with session.get(url, timeout=timeout) as resp: + ... + +Supported :class:`ClientTimeout` fields are: + + ``total`` + + The whole operation time including connection + establishment, request sending and response reading. + + ``connect`` + + The maximum time for connection establishment. + + ``sock_read`` + + The maximum allowed time between reading new + data portions from a peer. + +All fields are floats; ``None`` or ``0`` disables a particular timeout check. + +Thus the default timeout is:: - Timeout is cumulative time, it includes all operations like sending request, - redirects, response parsing, consuming response, etc. + aiohttp.ClientTimeout(total=5*60, connect=None, sock_read=None)
diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 19d87da830e..46cad3dc316 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -45,6 +45,7 @@ The client session supports the context manager protocol for self closing. version=aiohttp.HttpVersion11, \ cookie_jar=None, read_timeout=None, \ conn_timeout=None, \ + timeout=sentinel, \ raise_for_status=False, \ connector_owner=True, \ auto_decompress=True, proxies=None) @@ -113,16 +114,27 @@ The client session supports the context manager protocol for self closing. Automatically call :meth:`ClientResponse.raise_for_status()` for each response, ``False`` by default. - .. versionadded:: 2.0 + :param timeout: a :class:`ClientTimeout` settings structure, 5min + total timeout by default. + + .. versionadded:: 3.3 :param float read_timeout: Request operations timeout. ``read_timeout`` is cumulative for all request operations (request, redirects, responses, data consuming). By default, the read timeout is 5*60 seconds. Use ``None`` or ``0`` to disable timeout checks. + .. deprecated:: 3.3 + + Use ``timeout`` parameter instead. + :param float conn_timeout: timeout for connection establishing (optional). Values ``0`` or ``None`` mean no timeout. + .. deprecated:: 3.3 + + Use ``timeout`` parameter instead. + :param bool connector_owner: Close connector instance on session closing. @@ -197,7 +209,7 @@ The client session supports the context manager protocol for self closing. max_redirects=10,\ compress=None, chunked=None, expect100=False,\ read_until_eof=True, proxy=None, proxy_auth=None,\ - timeout=5*60, ssl=None, \ + timeout=sentinel, ssl=None, \ verify_ssl=None, fingerprint=None, \ ssl_context=None, proxy_headers=None) :async-with: @@ -278,8 +290,15 @@ The client session supports the context manager protocol for self closing. :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP Basic Authorization (optional) - :param int timeout: override the session's timeout - (``read_timeout``) for IO operations. + :param int timeout: override the session's timeout. + + .. versionchanged:: 3.3 + + The parameter is a :class:`ClientTimeout` instance; + :class:`float` is still supported for the sake of backward + compatibility. + + If a :class:`float` is passed, it is treated as a *total* timeout. :param ssl: SSL validation mode. ``None`` for default SSL check (:func:`ssl.create_default_context` is used), @@ -1449,12 +1468,39 @@ Utilities --------- +ClientTimeout +^^^^^^^^^^^^^ + +..
class:: ClientTimeout(*, total=None, connect=None, sock_read=None) + + A data class for client timeout settings. + + .. attribute:: total + + Total timeout for the whole request. + + :class:`float`, ``None`` by default. + + .. attribute:: connect + + A timeout for connecting to a peer. + + :class:`float`, ``None`` by default. + + .. attribute:: sock_read + + A timeout for reading a portion of data from a peer. + + :class:`float`, ``None`` by default. + + .. versionadded:: 3.3 + RequestInfo ^^^^^^^^^^^ .. class:: RequestInfo() - A namedtuple with request URL and headers from :class:`ClientRequest` + A data class with request URL and headers from :class:`ClientRequest` object, available as :attr:`ClientResponse.request_info` attribute. .. attribute:: url From 603017e7c384122194c277372ccb5de352d45177 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Thu, 10 May 2018 12:22:46 +0300 Subject: [PATCH 13/14] Add changelog --- CHANGES/2768.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 CHANGES/2768.feature diff --git a/CHANGES/2768.feature b/CHANGES/2768.feature new file mode 100644 index 00000000000..0418e638db0 --- /dev/null +++ b/CHANGES/2768.feature @@ -0,0 +1 @@ +Implement ``ClientTimeout`` class and support socket read timeout. \ No newline at end of file From 64e98f67734c9c552c599c4e90af75a33466893c Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Fri, 11 May 2018 15:03:13 +0300 Subject: [PATCH 14/14] Implement ClientTimeout.sock_connect --- aiohttp/client.py | 4 +- aiohttp/connector.py | 48 ++++++++++------------ docs/client_reference.rst | 18 +++++++- tests/test_client_request.py | 2 +- tests/test_client_session.py | 8 ++-- tests/test_connector.py | 80 ++++++++++++++++++------------------ tests/test_proxy.py | 41 +++++++++++------- 7 files changed, 112 insertions(+), 89 deletions(-) diff --git a/aiohttp/client.py b/aiohttp/client.py index 46affcf6acb..d70b5f7a966 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -48,6 +48,7 @@ class ClientTimeout: total = attr.ib(type=float, default=None) connect = attr.ib(type=float, default=None) sock_read = attr.ib(type=float, default=None) + sock_connect = attr.ib(type=float, default=None) # pool_queue_timeout = attr.ib(type=float, default=None) # dns_resolution_timeout = attr.ib(type=float, default=None) @@ -361,7 +362,8 @@ async def _request(self, method, url, *, loop=self._loop): conn = await self._connector.connect( req, - traces=traces + traces=traces, + timeout=timeout ) except asyncio.TimeoutError as exc: raise ServerTimeoutError( diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 863a1943a6a..ab4e8feb18a 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -22,7 +22,7 @@ ssl_errors) from .client_proto import ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import PY_36, is_ip_address, noop, sentinel +from .helpers import PY_36, CeilTimeout, is_ip_address, noop, sentinel from .locks import EventResultOrError from .resolver import DefaultResolver @@ -391,7 +391,7 @@ def _available_connections(self, key): return available - async def connect(self, req, traces=None): + async def connect(self, req, traces, timeout): """Get from pool or create new connection.""" key = req.connection_key available = self._available_connections(key) @@ -442,10 +442,7 @@ async def connect(self, req, traces=None): await trace.send_connection_create_start() try: - proto = await self._create_connection( - req, - traces=traces - ) + proto = await 
self._create_connection(req, traces, timeout) if self._closed: proto.close() raise ClientConnectionError("Connector is closed.") @@ -561,7 +558,7 @@ def _release(self, key, protocol, *, should_close=False): self._cleanup_handle = helpers.weakref_handle( self, '_cleanup', self._keepalive_timeout, self._loop) - async def _create_connection(self, req, traces=None): + async def _create_connection(self, req, traces, timeout): raise NotImplementedError() @@ -747,21 +744,17 @@ async def _resolve_host(self, host, port, traces=None): return self._cached_hosts.next_addrs(key) - async def _create_connection(self, req, traces=None): + async def _create_connection(self, req, traces, timeout): """Create connection. Has same keyword arguments as BaseEventLoop.create_connection. """ if req.proxy: _, proto = await self._create_proxy_connection( - req, - traces=traces - ) + req, traces, timeout) else: _, proto = await self._create_direct_connection( - req, - traces=traces - ) + req, traces, timeout) return proto @@ -821,10 +814,12 @@ def _get_fingerprint(self, req): return None async def _wrap_create_connection(self, *args, - req, client_error=ClientConnectorError, + req, timeout, + client_error=ClientConnectorError, **kwargs): try: - return await self._loop.create_connection(*args, **kwargs) + with CeilTimeout(timeout.sock_connect): + return await self._loop.create_connection(*args, **kwargs) except certificate_errors as exc: raise ClientConnectorCertificateError( req.connection_key, exc) from exc @@ -833,9 +828,8 @@ async def _wrap_create_connection(self, *args, except OSError as exc: raise client_error(req.connection_key, exc) from exc - async def _create_direct_connection(self, req, - *, client_error=ClientConnectorError, - traces=None): + async def _create_direct_connection(self, req, traces, timeout, + *, client_error=ClientConnectorError): sslcontext = self._get_ssl_context(req) fingerprint = self._get_fingerprint(req) @@ -860,7 +854,7 @@ async def _create_direct_connection(self, req, try: transp, proto = await self._wrap_create_connection( - self._factory, host, port, + self._factory, host, port, timeout=timeout, ssl=sslcontext, family=hinfo['family'], proto=hinfo['proto'], flags=hinfo['flags'], server_hostname=hinfo['hostname'] if sslcontext else None, @@ -884,7 +878,7 @@ async def _create_direct_connection(self, req, else: raise last_exc - async def _create_proxy_connection(self, req, traces=None): + async def _create_proxy_connection(self, req, traces, timeout): headers = {} if req.proxy_headers is not None: headers = req.proxy_headers @@ -899,7 +893,7 @@ async def _create_proxy_connection(self, req, traces=None): # create connection to proxy server transport, proto = await self._create_direct_connection( - proxy_req, client_error=ClientProxyConnectionError) + proxy_req, [], timeout, client_error=ClientProxyConnectionError) auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) if auth is not None: @@ -955,7 +949,8 @@ async def _create_proxy_connection(self, req, traces=None): transport.close() transport, proto = await self._wrap_create_connection( - self._factory, ssl=sslcontext, sock=rawsock, + self._factory, timeout=timeout, + ssl=sslcontext, sock=rawsock, server_hostname=req.host, req=req) finally: @@ -988,10 +983,11 @@ def path(self): """Path to unix socket.""" return self._path - async def _create_connection(self, req, traces=None): + async def _create_connection(self, req, traces, timeout): try: - _, proto = await self._loop.create_unix_connection( - self._factory, self._path) + with 
CeilTimeout(timeout.sock_connect): + _, proto = await self._loop.create_unix_connection( + self._factory, self._path) except OSError as exc: raise ClientConnectorError(req.connection_key, exc) from exc
diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 46cad3dc316..deb6846a15e 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1471,7 +1471,8 @@ Utilities ClientTimeout ^^^^^^^^^^^^^ -.. class:: ClientTimeout(*, total=None, connect=None, sock_read=None) +.. class:: ClientTimeout(*, total=None, connect=None, \ + sock_connect=None, sock_read=None) A data class for client timeout settings. @@ -1483,7 +1484,20 @@ ClientTimeout .. attribute:: connect - A timeout for connecting to a peer. + Total timeout for acquiring a connection from the pool. The time + consists of connection establishment for a new connection or + waiting for a free connection from the pool if pool connection + limits are exceeded. + + For the pure socket connection establishment time use + :attr:`sock_connect`. + + :class:`float`, ``None`` by default. + + .. attribute:: sock_connect + + A timeout for connecting to a peer for a new connection, not + taken from the pool. See also :attr:`connect`. + :class:`float`, ``None`` by default. .. attribute:: sock_read
diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 3155f4a437a..0087fcaec1f 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -1218,7 +1218,7 @@ async def send(self, conn): called = True return resp - async def create_connection(req, traces=None): + async def create_connection(req, traces, timeout): assert isinstance(req, CustomRequest) return mock.Mock() connector = BaseConnector(loop=loop)
diff --git a/tests/test_client_session.py b/tests/test_client_session.py index ab4512c3521..047ed796f66 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -367,7 +367,7 @@ async def test_reraise_os_error(create_session): req.send = mock.Mock(side_effect=err) session = create_session(request_class=req_factory) - async def create_connection(req, traces=None): + async def create_connection(req, traces, timeout): # return self.transport, self.protocol return mock.Mock() session._connector._create_connection = create_connection @@ -393,12 +393,12 @@ class UnexpectedException(BaseException): connections = [] original_connect = session._connector.connect - async def connect(req, traces=None): - conn = await original_connect(req, traces=traces) + async def connect(req, traces, timeout): + conn = await original_connect(req, traces, timeout) connections.append(conn) return conn - async def create_connection(req, traces=None): + async def create_connection(req, traces, timeout): # return self.transport, self.protocol conn = mock.Mock() return conn
diff --git a/tests/test_connector.py b/tests/test_connector.py index bf49545dee0..75894378264 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -16,7 +16,7 @@ import aiohttp from aiohttp import client, web -from aiohttp.client import ClientRequest +from aiohttp.client import ClientRequest, ClientTimeout from aiohttp.client_reqrep import ConnectionKey from aiohttp.connector import Connection, _DNSCacheTable from aiohttp.test_utils import make_mocked_coro, unused_port @@ -222,7 +222,7 @@ def test_del_empty_conector(loop): async def test_create_conn(loop): conn = aiohttp.BaseConnector(loop=loop) with pytest.raises(NotImplementedError): - await conn._create_connection(object()) + await conn._create_connection(object(), [], object()) def
test_context_manager(loop): @@ -488,7 +488,7 @@ async def certificate_error(*args, **kwargs): conn._loop.create_connection = certificate_error with pytest.raises(aiohttp.ClientConnectorCertificateError) as ctx: - await conn.connect(req) + await conn.connect(req, [], ClientTimeout()) assert isinstance(ctx.value, ssl.CertificateError) assert isinstance(ctx.value.certificate_error, ssl.CertificateError) @@ -592,7 +592,7 @@ def get_extra_info(param): conn._loop.create_connection = create_connection - await conn.connect(req) + await conn.connect(req, [], ClientTimeout()) assert ips == ips_tried assert os_error @@ -935,7 +935,7 @@ def test_dns_error(loop): loop=loop) with pytest.raises(aiohttp.ClientConnectorError): - loop.run_until_complete(connector.connect(req)) + loop.run_until_complete(connector.connect(req, [], ClientTimeout())) def test_get_pop_empty_conns(loop): @@ -1008,7 +1008,7 @@ async def test_connect(loop, key): conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) - connection = await conn.connect(req) + connection = await conn.connect(req, [], ClientTimeout()) assert not conn._create_connection.called assert connection._protocol is proto assert connection.transport is proto.transport @@ -1050,7 +1050,7 @@ async def test_connect_tracing(loop): conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) - conn2 = await conn.connect(req, traces=traces) + conn2 = await conn.connect(req, traces, ClientTimeout()) conn2.release() on_connection_create_start.assert_called_with( @@ -1076,7 +1076,7 @@ async def test_close_during_connect(loop): conn._create_connection = mock.Mock() conn._create_connection.return_value = fut - task = loop.create_task(conn.connect(req)) + task = loop.create_task(conn.connect(req, None, ClientTimeout())) await asyncio.sleep(0, loop=loop) conn.close() @@ -1379,7 +1379,7 @@ async def test_connect_with_limit(loop, key): conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) - connection1 = await conn.connect(req) + connection1 = await conn.connect(req, None, ClientTimeout()) assert connection1._protocol == proto assert 1 == len(conn._acquired) @@ -1391,7 +1391,7 @@ async def test_connect_with_limit(loop, key): async def f(): nonlocal acquired - connection2 = await conn.connect(req) + connection2 = await conn.connect(req, None, ClientTimeout()) acquired = True assert 1 == len(conn._acquired) assert 1 == len(conn._acquired_per_host[key]) @@ -1445,13 +1445,10 @@ async def test_connect_queued_operation_tracing(loop, key): conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) - connection1 = await conn.connect(req, traces=traces) + connection1 = await conn.connect(req, traces, ClientTimeout()) async def f(): - connection2 = await conn.connect( - req, - traces=traces - ) + connection2 = await conn.connect(req, traces, ClientTimeout()) on_connection_queued_start.assert_called_with( session, trace_config_ctx, @@ -1500,7 +1497,7 @@ async def test_connect_reuseconn_tracing(loop, key): conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._conns[key] = [(proto, loop.time())] - conn2 = await conn.connect(req, traces=traces) + conn2 = await conn.connect(req, traces, ClientTimeout()) conn2.release() on_connection_reuseconn.assert_called_with( @@ -1524,11 +1521,11 @@ async def test_connect_with_limit_and_limit_per_host(loop, key): 
conn._create_connection.return_value.set_result(proto) acquired = False - connection1 = await conn.connect(req) + connection1 = await conn.connect(req, None, ClientTimeout()) async def f(): nonlocal acquired - connection2 = await conn.connect(req) + connection2 = await conn.connect(req, None, ClientTimeout()) acquired = True assert 1 == len(conn._acquired) assert 1 == len(conn._acquired_per_host[key]) @@ -1558,11 +1555,11 @@ async def test_connect_with_no_limit_and_limit_per_host(loop, key): conn._create_connection.return_value.set_result(proto) acquired = False - connection1 = await conn.connect(req) + connection1 = await conn.connect(req, None, ClientTimeout()) async def f(): nonlocal acquired - connection2 = await conn.connect(req) + connection2 = await conn.connect(req, None, ClientTimeout()) acquired = True connection2.release() @@ -1590,11 +1587,11 @@ async def test_connect_with_no_limits(loop, key): conn._create_connection.return_value.set_result(proto) acquired = False - connection1 = await conn.connect(req) + connection1 = await conn.connect(req, None, ClientTimeout()) async def f(): nonlocal acquired - connection2 = await conn.connect(req) + connection2 = await conn.connect(req, None, ClientTimeout()) acquired = True assert 1 == len(conn._acquired) assert 1 == len(conn._acquired_per_host[key]) @@ -1623,7 +1620,7 @@ async def test_connect_with_limit_cancelled(loop): conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) - connection = await conn.connect(req) + connection = await conn.connect(req, None, ClientTimeout()) assert connection._protocol == proto assert connection.transport == proto.transport @@ -1631,7 +1628,8 @@ async def test_connect_with_limit_cancelled(loop): with pytest.raises(asyncio.TimeoutError): # limit exhausted - await asyncio.wait_for(conn.connect(req), 0.01, loop=loop) + await asyncio.wait_for(conn.connect(req, None, ClientTimeout()), + 0.01, loop=loop) connection.close() @@ -1646,7 +1644,7 @@ def check_with_exc(err): with pytest.raises(Exception): req = mock.Mock() - yield from conn.connect(req) + yield from conn.connect(req, None, ClientTimeout()) assert not conn._waiters @@ -1670,7 +1668,7 @@ async def test_connect_with_limit_concurrent(loop): # Use a real coroutine for _create_connection; a mock would mask # problems that only happen when the method yields. 
- async def create_connection(req, traces=None): + async def create_connection(req, traces, timeout): nonlocal num_connections num_connections += 1 await asyncio.sleep(0, loop=loop) @@ -1701,7 +1699,7 @@ async def f(start=True): return num_requests += 1 if not start: - connection = await conn.connect(req) + connection = await conn.connect(req, None, ClientTimeout()) await asyncio.sleep(0, loop=loop) connection.release() tasks = [ @@ -1725,7 +1723,7 @@ async def test_connect_waiters_cleanup(loop): conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._available_connections = mock.Mock(return_value=0) - t = loop.create_task(conn.connect(req)) + t = loop.create_task(conn.connect(req, None, ClientTimeout())) await asyncio.sleep(0, loop=loop) assert conn._waiters.keys() @@ -1744,7 +1742,7 @@ async def test_connect_waiters_cleanup_key_error(loop): conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._available_connections = mock.Mock(return_value=0) - t = loop.create_task(conn.connect(req)) + t = loop.create_task(conn.connect(req, None, ClientTimeout())) await asyncio.sleep(0, loop=loop) assert conn._waiters.keys() @@ -1771,7 +1769,7 @@ async def test_close_with_acquired_connection(loop): conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) - connection = await conn.connect(req) + connection = await conn.connect(req, None, ClientTimeout()) assert 1 == len(conn._acquired) conn.close() @@ -1840,7 +1838,7 @@ async def test_error_on_connection(loop, key): fut = loop.create_future() exc = OSError() - async def create_connection(req, traces=None): + async def create_connection(req, traces, timeout): nonlocal i i += 1 if i == 1: @@ -1851,8 +1849,8 @@ async def create_connection(req, traces=None): conn._create_connection = create_connection - t1 = loop.create_task(conn.connect(req)) - t2 = loop.create_task(conn.connect(req)) + t1 = loop.create_task(conn.connect(req, None, ClientTimeout())) + t2 = loop.create_task(conn.connect(req, None, ClientTimeout())) await asyncio.sleep(0, loop=loop) assert not t1.done() assert not t2.done() @@ -1885,7 +1883,7 @@ async def create_connection(req, traces=None): conn._acquired.add(proto) - conn2 = loop.create_task(conn.connect(req)) + conn2 = loop.create_task(conn.connect(req, None, ClientTimeout())) await asyncio.sleep(0, loop=loop) conn2.cancel() @@ -1905,7 +1903,7 @@ async def test_error_on_connection_with_cancelled_waiter(loop, key): fut2 = loop.create_future() exc = OSError() - async def create_connection(req, traces=None): + async def create_connection(req, traces, timeout): nonlocal i i += 1 if i == 1: @@ -1918,9 +1916,9 @@ async def create_connection(req, traces=None): conn._create_connection = create_connection - t1 = loop.create_task(conn.connect(req)) - t2 = loop.create_task(conn.connect(req)) - t3 = loop.create_task(conn.connect(req)) + t1 = loop.create_task(conn.connect(req, None, ClientTimeout())) + t2 = loop.create_task(conn.connect(req, None, ClientTimeout())) + t3 = loop.create_task(conn.connect(req, None, ClientTimeout())) await asyncio.sleep(0, loop=loop) assert not t1.done() assert not t2.done() @@ -1965,7 +1963,7 @@ def test_unix_connector_not_found(loop): 'GET', URL('http://www.python.org'), loop=loop) with pytest.raises(aiohttp.ClientConnectorError): - loop.run_until_complete(connector.connect(req)) + loop.run_until_complete(connector.connect(req, None, ClientTimeout())) @pytest.mark.skipif(not hasattr(socket, 'AF_UNIX'), @@ -1979,7 +1977,7 @@ def test_unix_connector_permission(loop): 
'GET', URL('http://www.python.org'), loop=loop) with pytest.raises(aiohttp.ClientConnectorError): - loop.run_until_complete(connector.connect(req)) + loop.run_until_complete(connector.connect(req, None, ClientTimeout())) def test_default_use_dns_cache(loop): @@ -1997,7 +1995,7 @@ async def test_resolver_not_called_with_address_is_ip(loop): response_class=mock.Mock()) with pytest.raises(OSError): - await connector.connect(req) + await connector.connect(req, None, ClientTimeout()) resolver.resolve.assert_not_called() diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 9fea4ce350b..22698068777 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -52,7 +52,8 @@ def test_connect(self, ClientRequestMock): }) self.loop.create_connection = make_mocked_coro( (proto.transport, proto)) - conn = self.loop.run_until_complete(connector.connect(req)) + conn = self.loop.run_until_complete( + connector.connect(req, None, aiohttp.ClientTimeout())) self.assertEqual(req.url, URL('http://www.python.org')) self.assertIs(conn._protocol, proto) self.assertIs(conn.transport, proto.transport) @@ -82,7 +83,8 @@ def test_proxy_headers(self, ClientRequestMock): }) self.loop.create_connection = make_mocked_coro( (proto.transport, proto)) - conn = self.loop.run_until_complete(connector.connect(req)) + conn = self.loop.run_until_complete(connector.connect( + req, None, aiohttp.ClientTimeout())) self.assertEqual(req.url, URL('http://www.python.org')) self.assertIs(conn._protocol, proto) self.assertIs(conn.transport, proto.transport) @@ -118,7 +120,8 @@ def test_proxy_dns_error(self): ) expected_headers = dict(req.headers) with self.assertRaises(aiohttp.ClientConnectorError): - self.loop.run_until_complete(connector.connect(req)) + self.loop.run_until_complete(connector.connect( + req, None, aiohttp.ClientTimeout())) self.assertEqual(req.url.path, '/') self.assertEqual(dict(req.headers), expected_headers) @@ -138,7 +141,8 @@ def test_proxy_connection_error(self): loop=self.loop, ) with self.assertRaises(aiohttp.ClientProxyConnectionError): - self.loop.run_until_complete(connector.connect(req)) + self.loop.run_until_complete(connector.connect( + req, None, aiohttp.ClientTimeout())) @mock.patch('aiohttp.connector.ClientRequest') def test_https_connect(self, ClientRequestMock): @@ -170,7 +174,8 @@ def test_https_connect(self, ClientRequestMock): proxy=URL('http://proxy.example.com'), loop=self.loop, ) - self.loop.run_until_complete(connector._create_connection(req)) + self.loop.run_until_complete( + connector._create_connection(req, None, aiohttp.ClientTimeout())) self.assertEqual(req.url.path, '/') self.assertEqual(proxy_req.method, 'CONNECT') @@ -228,7 +233,8 @@ def create_connection(*args, **kwargs): loop=self.loop, ) with self.assertRaises(aiohttp.ClientConnectorCertificateError): - self.loop.run_until_complete(connector._create_connection(req)) + self.loop.run_until_complete(connector._create_connection( + req, None, aiohttp.ClientTimeout())) @mock.patch('aiohttp.connector.ClientRequest') def test_https_connect_ssl_error(self, ClientRequestMock): @@ -276,7 +282,8 @@ def create_connection(*args, **kwargs): loop=self.loop, ) with self.assertRaises(aiohttp.ClientConnectorSSLError): - self.loop.run_until_complete(connector._create_connection(req)) + self.loop.run_until_complete(connector._create_connection( + req, None, aiohttp.ClientTimeout())) @mock.patch('aiohttp.connector.ClientRequest') def test_https_connect_runtime_error(self, ClientRequestMock): @@ -311,7 +318,8 @@ def 
test_https_connect_runtime_error(self, ClientRequestMock): ) with self.assertRaisesRegex( RuntimeError, "Transport does not expose socket instance"): - self.loop.run_until_complete(connector._create_connection(req)) + self.loop.run_until_complete(connector._create_connection( + req, None, aiohttp.ClientTimeout())) self.loop.run_until_complete(proxy_req.close()) proxy_resp.close() @@ -351,7 +359,8 @@ def test_https_connect_http_proxy_error(self, ClientRequestMock): ) with self.assertRaisesRegex( aiohttp.ClientHttpProxyError, "400, message='bad request'"): - self.loop.run_until_complete(connector._create_connection(req)) + self.loop.run_until_complete(connector._create_connection( + req, None, aiohttp.ClientTimeout())) self.loop.run_until_complete(proxy_req.close()) proxy_resp.close() @@ -390,7 +399,8 @@ def test_https_connect_resp_start_error(self, ClientRequestMock): loop=self.loop, ) with self.assertRaisesRegex(OSError, "error message"): - self.loop.run_until_complete(connector._create_connection(req)) + self.loop.run_until_complete(connector._create_connection( + req, None, aiohttp.ClientTimeout())) @mock.patch('aiohttp.connector.ClientRequest') def test_request_port(self, ClientRequestMock): @@ -412,7 +422,8 @@ def test_request_port(self, ClientRequestMock): proxy=URL('http://proxy.example.com'), loop=self.loop, ) - self.loop.run_until_complete(connector._create_connection(req)) + self.loop.run_until_complete(connector._create_connection( + req, None, aiohttp.ClientTimeout())) self.assertEqual(req.url, URL('http://localhost:1234/path')) def test_proxy_auth_property(self): @@ -460,7 +471,8 @@ def test_https_connect_pass_ssl_context(self, ClientRequestMock): proxy=URL('http://proxy.example.com'), loop=self.loop, ) - self.loop.run_until_complete(connector._create_connection(req)) + self.loop.run_until_complete(connector._create_connection( + req, None, aiohttp.ClientTimeout())) self.loop.create_connection.assert_called_with( mock.ANY, @@ -515,7 +527,8 @@ def test_https_auth(self, ClientRequestMock): ) self.assertNotIn('AUTHORIZATION', req.headers) self.assertNotIn('PROXY-AUTHORIZATION', req.headers) - self.loop.run_until_complete(connector._create_connection(req)) + self.loop.run_until_complete( + connector._create_connection(req, None, aiohttp.ClientTimeout())) self.assertEqual(req.url.path, '/') self.assertNotIn('AUTHORIZATION', req.headers) @@ -526,7 +539,7 @@ def test_https_auth(self, ClientRequestMock): connector._resolve_host.assert_called_with( 'proxy.example.com', 80, - traces=None) + traces=mock.ANY) self.loop.run_until_complete(proxy_req.close()) proxy_resp.close()
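
The in-code comment on ``ClientTimeout`` recommends deriving a per-request timeout from the session default either by building a new object or by using ``attr.evolve()``. A minimal sketch of that pattern, assuming aiohttp with this patch series applied (the 3.3 ``ClientTimeout`` API); the ``fetch()`` helper and the URL below are illustrative only::

    import asyncio

    import attr
    import aiohttp

    # Session-wide defaults: 60s for the whole operation, 10s to connect.
    DEFAULT_TIMEOUT = aiohttp.ClientTimeout(total=60, connect=10)


    async def fetch(url):
        async with aiohttp.ClientSession(timeout=DEFAULT_TIMEOUT) as session:
            # Per-request override: derive a new frozen ClientTimeout from the
            # session default with attr.evolve() instead of building one from
            # scratch; only sock_read is changed here.
            slow_read = attr.evolve(DEFAULT_TIMEOUT, sock_read=30)
            async with session.get(url, timeout=slow_read) as resp:
                return await resp.text()


    asyncio.get_event_loop().run_until_complete(fetch('http://example.com/'))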