comparison venv/lib/python2.7/site-packages/requests/adapters.py @ 0:d67268158946 draft

planemo upload commit a3f181f5f126803c654b3a66dd4e83a48f7e203b
author bcclaywell
date Mon, 12 Oct 2015 17:43:33 -0400
parents
children
comparison
equal deleted inserted replaced
-1:000000000000 0:d67268158946
1 # -*- coding: utf-8 -*-
2
3 """
4 requests.adapters
5 ~~~~~~~~~~~~~~~~~
6
7 This module contains the transport adapters that Requests uses to define
8 and maintain connections.
9 """
10
import os
import socket

from .auth import _basic_auth_str
from .compat import urlparse, basestring
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
                         ProxyError, RetryError)
from .models import Response
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
from .packages.urllib3.util.retry import Retry
from .packages.urllib3.exceptions import ClosedPoolError
from .packages.urllib3.exceptions import ConnectTimeoutError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .packages.urllib3.exceptions import ProtocolError
from .packages.urllib3.exceptions import ReadTimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import ResponseError
from .structures import CaseInsensitiveDict
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
                    prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
                    select_proxy)
36
# Defaults for HTTPAdapter's urllib3 connection pooling.
DEFAULT_POOLBLOCK = False      # don't block when a pool has no free connection
DEFAULT_POOLSIZE = 10          # number of pools cached / connections per pool
DEFAULT_RETRIES = 0            # sentinel: no retries (see HTTPAdapter.__init__)
DEFAULT_POOL_TIMEOUT = None    # wait indefinitely when checking out a low-level conn
41
42
class BaseAdapter(object):
    """The Base Transport Adapter.

    Defines the interface Requests expects of every transport adapter:
    :meth:`send` delivers a prepared request and returns a Response,
    :meth:`close` disposes of any adapter state. Subclasses (such as
    :class:`HTTPAdapter`) must override both methods.
    """

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        The signature mirrors :meth:`HTTPAdapter.send` so that every
        adapter mounted on a Session shares one calling convention.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a (connect, read) tuple.
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Cleans up adapter specific items. Must be overridden."""
        raise NotImplementedError
55
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP
    and HTTPS urls by implementing the Transport Adapter interface. This
    class is normally created behind the scenes by the
    :class:`Session <Session>` class.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param int max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    # Attributes that survive pickling; see __getstate__/__setstate__.
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        # The DEFAULT_RETRIES sentinel keeps the historical behaviour of
        # never retrying reads; any other value is interpreted by urllib3
        # (an int, or a ready-made Retry instance).
        if max_retries == DEFAULT_RETRIES:
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)

        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        # Remembered so the pool manager can be rebuilt after unpickling.
        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
102
103 def __getstate__(self):
104 return dict((attr, getattr(self, attr, None)) for attr in
105 self.__attrs__)
106
107 def __setstate__(self, state):
108 # Can't handle by adding 'proxy_manager' to self.__attrs__ because
109 # because self.poolmanager uses a lambda function, which isn't pickleable.
110 self.proxy_manager = {}
111 self.config = {}
112
113 for attr, value in state.items():
114 setattr(self, attr, value)
115
116 self.init_poolmanager(self._pool_connections, self._pool_maxsize,
117 block=self._pool_block)
118
119 def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
120 """Initializes a urllib3 PoolManager.
121
122 This method should not be called from user code, and is only
123 exposed for use when subclassing the
124 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
125
126 :param connections: The number of urllib3 connection pools to cache.
127 :param maxsize: The maximum number of connections to save in the pool.
128 :param block: Block when no free connections are available.
129 :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
130 """
131 # save these values for pickling
132 self._pool_connections = connections
133 self._pool_maxsize = maxsize
134 self._pool_block = block
135
136 self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
137 block=block, strict=True, **pool_kwargs)
138
139 def proxy_manager_for(self, proxy, **proxy_kwargs):
140 """Return urllib3 ProxyManager for the given proxy.
141
142 This method should not be called from user code, and is only
143 exposed for use when subclassing the
144 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
145
146 :param proxy: The proxy to return a urllib3 ProxyManager for.
147 :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
148 :returns: ProxyManager
149 """
150 if not proxy in self.proxy_manager:
151 proxy_headers = self.proxy_headers(proxy)
152 self.proxy_manager[proxy] = proxy_from_url(
153 proxy,
154 proxy_headers=proxy_headers,
155 num_pools=self._pool_connections,
156 maxsize=self._pool_maxsize,
157 block=self._pool_block,
158 **proxy_kwargs)
159
160 return self.proxy_manager[proxy]
161
162 def cert_verify(self, conn, url, verify, cert):
163 """Verify a SSL certificate. This method should not be called from user
164 code, and is only exposed for use when subclassing the
165 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
166
167 :param conn: The urllib3 connection object associated with the cert.
168 :param url: The requested URL.
169 :param verify: Whether we should actually verify the certificate.
170 :param cert: The SSL certificate to verify.
171 """
172 if url.lower().startswith('https') and verify:
173
174 cert_loc = None
175
176 # Allow self-specified cert location.
177 if verify is not True:
178 cert_loc = verify
179
180 if not cert_loc:
181 cert_loc = DEFAULT_CA_BUNDLE_PATH
182
183 if not cert_loc:
184 raise Exception("Could not find a suitable SSL CA certificate bundle.")
185
186 conn.cert_reqs = 'CERT_REQUIRED'
187 conn.ca_certs = cert_loc
188 else:
189 conn.cert_reqs = 'CERT_NONE'
190 conn.ca_certs = None
191
192 if cert:
193 if not isinstance(cert, basestring):
194 conn.cert_file = cert[0]
195 conn.key_file = cert[1]
196 else:
197 conn.cert_file = cert
198
199 def build_response(self, req, resp):
200 """Builds a :class:`Response <requests.Response>` object from a urllib3
201 response. This should not be called from user code, and is only exposed
202 for use when subclassing the
203 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
204
205 :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
206 :param resp: The urllib3 response object.
207 """
208 response = Response()
209
210 # Fallback to None if there's no status_code, for whatever reason.
211 response.status_code = getattr(resp, 'status', None)
212
213 # Make headers case-insensitive.
214 response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
215
216 # Set encoding.
217 response.encoding = get_encoding_from_headers(response.headers)
218 response.raw = resp
219 response.reason = response.raw.reason
220
221 if isinstance(req.url, bytes):
222 response.url = req.url.decode('utf-8')
223 else:
224 response.url = req.url
225
226 # Add new cookies from the server.
227 extract_cookies_to_jar(response.cookies, req, resp)
228
229 # Give the Response some context.
230 response.request = req
231 response.connection = self
232
233 return response
234
235 def get_connection(self, url, proxies=None):
236 """Returns a urllib3 connection for the given URL. This should not be
237 called from user code, and is only exposed for use when subclassing the
238 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
239
240 :param url: The URL to connect to.
241 :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
242 """
243 proxy = select_proxy(url, proxies)
244
245 if proxy:
246 proxy = prepend_scheme_if_needed(proxy, 'http')
247 proxy_manager = self.proxy_manager_for(proxy)
248 conn = proxy_manager.connection_from_url(url)
249 else:
250 # Only scheme should be lower case
251 parsed = urlparse(url)
252 url = parsed.geturl()
253 conn = self.poolmanager.connection_from_url(url)
254
255 return conn
256
257 def close(self):
258 """Disposes of any internal state.
259
260 Currently, this just closes the PoolManager, which closes pooled
261 connections.
262 """
263 self.poolmanager.clear()
264
265 def request_url(self, request, proxies):
266 """Obtain the url to use when making the final request.
267
268 If the message is being sent through a HTTP proxy, the full URL has to
269 be used. Otherwise, we should only use the path portion of the URL.
270
271 This should not be called from user code, and is only exposed for use
272 when subclassing the
273 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
274
275 :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
276 :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
277 """
278 proxy = select_proxy(request.url, proxies)
279 scheme = urlparse(request.url).scheme
280 if proxy and scheme != 'https':
281 url = urldefragauth(request.url)
282 else:
283 url = request.path_url
284
285 return url
286
287 def add_headers(self, request, **kwargs):
288 """Add any headers needed by the connection. As of v2.0 this does
289 nothing by default, but is left for overriding by users that subclass
290 the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
291
292 This should not be called from user code, and is only exposed for use
293 when subclassing the
294 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
295
296 :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
297 :param kwargs: The keyword arguments from the call to send().
298 """
299 pass
300
301 def proxy_headers(self, proxy):
302 """Returns a dictionary of the headers to add to any request sent
303 through a proxy. This works with urllib3 magic to ensure that they are
304 correctly sent to the proxy, rather than in a tunnelled request if
305 CONNECT is being used.
306
307 This should not be called from user code, and is only exposed for use
308 when subclassing the
309 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
310
311 :param proxies: The url of the proxy being used for this request.
312 """
313 headers = {}
314 username, password = get_auth_from_url(proxy)
315
316 if username and password:
317 headers['Proxy-Authorization'] = _basic_auth_str(username,
318 password)
319
320 return headers
321
322 def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
323 """Sends PreparedRequest object. Returns Response object.
324
325 :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
326 :param stream: (optional) Whether to stream the request content.
327 :param timeout: (optional) How long to wait for the server to send
328 data before giving up, as a float, or a :ref:`(connect timeout,
329 read timeout) <timeouts>` tuple.
330 :type timeout: float or tuple
331 :param verify: (optional) Whether to verify SSL certificates.
332 :param cert: (optional) Any user-provided SSL certificate to be trusted.
333 :param proxies: (optional) The proxies dictionary to apply to the request.
334 """
335
336 conn = self.get_connection(request.url, proxies)
337
338 self.cert_verify(conn, request.url, verify, cert)
339 url = self.request_url(request, proxies)
340 self.add_headers(request)
341
342 chunked = not (request.body is None or 'Content-Length' in request.headers)
343
344 if isinstance(timeout, tuple):
345 try:
346 connect, read = timeout
347 timeout = TimeoutSauce(connect=connect, read=read)
348 except ValueError as e:
349 # this may raise a string formatting error.
350 err = ("Invalid timeout {0}. Pass a (connect, read) "
351 "timeout tuple, or a single float to set "
352 "both timeouts to the same value".format(timeout))
353 raise ValueError(err)
354 else:
355 timeout = TimeoutSauce(connect=timeout, read=timeout)
356
357 try:
358 if not chunked:
359 resp = conn.urlopen(
360 method=request.method,
361 url=url,
362 body=request.body,
363 headers=request.headers,
364 redirect=False,
365 assert_same_host=False,
366 preload_content=False,
367 decode_content=False,
368 retries=self.max_retries,
369 timeout=timeout
370 )
371
372 # Send the request.
373 else:
374 if hasattr(conn, 'proxy_pool'):
375 conn = conn.proxy_pool
376
377 low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
378
379 try:
380 low_conn.putrequest(request.method,
381 url,
382 skip_accept_encoding=True)
383
384 for header, value in request.headers.items():
385 low_conn.putheader(header, value)
386
387 low_conn.endheaders()
388
389 for i in request.body:
390 low_conn.send(hex(len(i))[2:].encode('utf-8'))
391 low_conn.send(b'\r\n')
392 low_conn.send(i)
393 low_conn.send(b'\r\n')
394 low_conn.send(b'0\r\n\r\n')
395
396 r = low_conn.getresponse()
397 resp = HTTPResponse.from_httplib(
398 r,
399 pool=conn,
400 connection=low_conn,
401 preload_content=False,
402 decode_content=False
403 )
404 except:
405 # If we hit any problems here, clean up the connection.
406 # Then, reraise so that we can handle the actual exception.
407 low_conn.close()
408 raise
409
410 except (ProtocolError, socket.error) as err:
411 raise ConnectionError(err, request=request)
412
413 except MaxRetryError as e:
414 if isinstance(e.reason, ConnectTimeoutError):
415 raise ConnectTimeout(e, request=request)
416
417 if isinstance(e.reason, ResponseError):
418 raise RetryError(e, request=request)
419
420 raise ConnectionError(e, request=request)
421
422 except ClosedPoolError as e:
423 raise ConnectionError(e, request=request)
424
425 except _ProxyError as e:
426 raise ProxyError(e)
427
428 except (_SSLError, _HTTPError) as e:
429 if isinstance(e, _SSLError):
430 raise SSLError(e, request=request)
431 elif isinstance(e, ReadTimeoutError):
432 raise ReadTimeout(e, request=request)
433 else:
434 raise
435
436 return self.build_response(request, resp)