venv/lib/python2.7/site-packages/requests/packages/urllib3/connectionpool.py @ 0:d67268158946 draft

planemo upload commit a3f181f5f126803c654b3a66dd4e83a48f7e203b
author bcclaywell
date Mon, 12 Oct 2015 17:43:33 -0400
import errno
import logging
import sys
import warnings

from socket import error as SocketError, timeout as SocketTimeout
import socket

try:  # Python 3
    from queue import LifoQueue, Empty, Full
except ImportError:
    from Queue import LifoQueue, Empty, Full
    import Queue as _  # Platform-specific: Windows


from .exceptions import (
    ClosedPoolError,
    ProtocolError,
    EmptyPoolError,
    HeaderParsingError,
    HostChangedError,
    LocationValueError,
    MaxRetryError,
    ProxyError,
    ConnectTimeoutError,
    ReadTimeoutError,
    SSLError,
    TimeoutError,
    InsecureRequestWarning,
    NewConnectionError,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .connection import (
    port_by_scheme,
    DummyConnection,
    HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
    HTTPException, BaseSSLError, ConnectionError
)
from .request import RequestMethods
from .response import HTTPResponse

from .util.connection import is_connection_dropped
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host, Url


xrange = six.moves.xrange

log = logging.getLogger(__name__)

_Default = object()


## Pool objects
class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
    """

    scheme = None
    QueueCls = LifoQueue

    def __init__(self, host, port=None):
        if not host:
            raise LocationValueError("No host specified.")

        # httplib doesn't like it when we include brackets in ipv6 addresses
        self.host = host.strip('[]')
        self.port = port

    def __str__(self):
        return '%s(host=%r, port=%r)' % (type(self).__name__,
                                         self.host, self.port)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        # Return False to re-raise any potential exceptions
        return False

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        pass

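# Illustrative sketch (not part of the upstream module): the context-manager
# protocol above lets a pool be used in a ``with`` block so that ``close()``
# runs automatically. The host below is a placeholder; ``HTTPConnectionPool``
# and its ``request()`` method (inherited from ``RequestMethods``) are
# defined later in this file.
#
#     with HTTPConnectionPool('example.org', port=80, maxsize=2) as pool:
#         response = pool.request('GET', '/')
#         print(response.status)
#
# ``__exit__`` returns False, so any exception raised inside the block is
# still propagated after the pool has been closed.
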
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])


class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`httplib.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`httplib.HTTPConnection`.

    :param strict:
        Causes BadStatusLine to be raised if the status line can't be parsed
        as a valid HTTP/1.0 or 1.1 status line, passed into
        :class:`httplib.HTTPConnection`.

        .. note::
           Only works in Python 2. This parameter is ignored in Python 3.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.connectionpool.ProxyManager`

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.connectionpool.ProxyManager`

    :param \**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
    """

    scheme = 'http'
    ConnectionCls = HTTPConnection

    def __init__(self, host, port=None, strict=False,
                 timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
                 headers=None, retries=None,
                 _proxy=None, _proxy_headers=None,
                 **conn_kw):
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        self.strict = strict

        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)

        if retries is None:
            retries = Retry.DEFAULT

        self.timeout = timeout
        self.retries = retries

        self.pool = self.QueueCls(maxsize)
        self.block = block

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}

        # Fill the queue up so that doing get() on it will block properly
        for _ in xrange(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0
        self.conn_kw = conn_kw

        if self.proxy:
            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
            # We cannot know if the user has added default socket options, so
            # we cannot replace the list.
            self.conn_kw.setdefault('socket_options', [])

    def _new_conn(self):
        """
        Return a fresh :class:`HTTPConnection`.
        """
        self.num_connections += 1
        log.info("Starting new HTTP connection (%d): %s" %
                 (self.num_connections, self.host))

        conn = self.ConnectionCls(host=self.host, port=self.port,
                                  timeout=self.timeout.connect_timeout,
                                  strict=self.strict, **self.conn_kw)
        return conn

    def _get_conn(self, timeout=None):
        """
        Get a connection. Will return a pooled connection if one is available.

        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.

        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        """
        conn = None
        try:
            conn = self.pool.get(block=self.block, timeout=timeout)

        except AttributeError:  # self.pool is None
            raise ClosedPoolError(self, "Pool is closed.")

        except Empty:
            if self.block:
                raise EmptyPoolError(self,
                                     "Pool reached maximum size and no more "
                                     "connections are allowed.")
            pass  # Oh well, we'll create a new connection then

        # If this is a persistent connection, check if it got disconnected
        if conn and is_connection_dropped(conn):
            log.info("Resetting dropped connection: %s" % self.host)
            conn.close()
            if getattr(conn, 'auto_open', 1) == 0:
                # This is a proxied connection that has been mutated by
                # httplib._tunnel() and cannot be reused (since it would
                # attempt to bypass the proxy)
                conn = None

        return conn or self._new_conn()

    def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except Full:
            # This should never happen if self.block == True
            log.warning(
                "Connection pool is full, discarding connection: %s" %
                self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close()

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        pass

    def _prepare_proxy(self, conn):
        # Nothing to do for HTTP connections.
        pass

    def _get_timeout(self, timeout):
        """ Helper that always returns a :class:`urllib3.util.Timeout` """
        if timeout is _Default:
            return self.timeout.clone()

        if isinstance(timeout, Timeout):
            return timeout.clone()
        else:
            # User passed us an int/float. This is for backwards compatibility,
            # can be removed later
            return Timeout.from_float(timeout)

    def _raise_timeout(self, err, url, timeout_value):
        """Is the error actually a timeout? Will raise a ReadTimeout or pass"""

        if isinstance(err, SocketTimeout):
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

        # See the above comment about EAGAIN in Python 3. In Python 2 we have
        # to specifically catch it and throw the timeout error
        if hasattr(err, 'errno') and err.errno in _blocking_errnos:
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

        # Catch possible read timeouts thrown as SSL errors. If not the
        # case, rethrow the original. We need to do this because of:
        # http://bugs.python.org/issue10272
        if 'timed out' in str(err) or 'did not complete (read)' in str(err):  # Python 2.6
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

    def _make_request(self, conn, method, url, timeout=_Default,
                      **httplib_request_kw):
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1

        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout

        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise

        # conn.request() calls httplib.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        conn.request(method, url, **httplib_request_kw)

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        # App Engine doesn't have a sock attr
        if getattr(conn, 'sock', None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout)
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)

        # Receive the response from the server
        try:
            try:  # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:  # Python 2.6 and older
                httplib_response = conn.getresponse()
        except (SocketTimeout, BaseSSLError, SocketError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # AppEngine doesn't have a version attr.
        http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
        log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
                                          httplib_response.status,
                                          httplib_response.length))

        try:
            assert_header_parsing(httplib_response.msg)
        except HeaderParsingError as hpe:  # Platform-specific: Python 3
            log.warning(
                'Failed to parse headers (url=%s): %s',
                self._absolute_url(url), hpe, exc_info=True)

        return httplib_response

    def _absolute_url(self, path):
        return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        # Disable access to the pool
        old_pool, self.pool = self.pool, None

        try:
            while True:
                conn = old_pool.get(block=False)
                if conn:
                    conn.close()

        except Empty:
            pass  # Done.

    def is_same_host(self, url):
        """
        Check if the given ``url`` is a member of the same host as this
        connection pool.
        """
        if url.startswith('/'):
            return True

        # TODO: Add optional support for socket.gethostbyname checking.
        scheme, host, port = get_host(url)

        # Use explicit default port for comparison when none is given
        if self.port and not port:
            port = port_by_scheme.get(scheme)
        elif not self.port and port == port_by_scheme.get(scheme):
            port = None

        return (scheme, host, port) == (self.scheme, self.host, self.port)

    def urlopen(self, method, url, body=None, headers=None, retries=None,
                redirect=True, assert_same_host=True, timeout=_Default,
                pool_timeout=None, release_conn=None, **response_kw):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.

        :param \**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = response_kw.get('preload_content', True)

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        conn = None

        # Merge the proxy headers. Only do this in HTTP. We have to copy the
        # headers dict so we can safely change it without those changes being
        # reflected in anyone else's copy.
        if self.scheme == 'http':
            headers = headers.copy()
            headers.update(self.proxy_headers)

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout

            is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
            if is_new_proxy_conn:
                self._prepare_proxy(conn)

            # Make the request on the httplib connection object.
            httplib_response = self._make_request(conn, method, url,
                                                  timeout=timeout_obj,
                                                  body=body, headers=headers)

            # If we're going to release the connection in ``finally:``, then
            # the request doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = not release_conn and conn

            # Import httplib's response into our own wrapper object
            response = HTTPResponse.from_httplib(httplib_response,
                                                 pool=self,
                                                 connection=response_conn,
                                                 **response_kw)

            # else:
            #     The connection will be put back into the pool when
            #     ``response.release_conn()`` is called (implicitly by
            #     ``response.read()``)

        except Empty:
            # Timed out by queue.
            raise EmptyPoolError(self, "No pool connections are available.")

        except (BaseSSLError, CertificateError) as e:
            # Close the connection. If a connection is reused on which there
            # was a Certificate error, the next request will certainly raise
            # another Certificate error.
            conn = conn and conn.close()
            release_conn = True
            raise SSLError(e)

        except SSLError:
            # Treat SSLError separately from BaseSSLError to preserve
            # traceback.
            conn = conn and conn.close()
            release_conn = True
            raise

        except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            conn = conn and conn.close()
            release_conn = True

            if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError('Cannot connect to proxy.', e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError('Connection aborted.', e)

            retries = retries.increment(method, url, error=e, _pool=self,
                                        _stacktrace=sys.exc_info()[2])
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if release_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warning("Retrying (%r) after connection "
                        "broken by '%r': %s" % (retries, err, url))
            return self.urlopen(method, url, body, headers, retries,
                                redirect, assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, **response_kw)

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                method = 'GET'

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    # Release the connection for this response, since we're not
                    # returning it to be released manually.
                    response.release_conn()
                    raise
                return response

            log.info("Redirecting %s -> %s" % (url, redirect_location))
            return self.urlopen(method, redirect_location, body, headers,
                                retries=retries, redirect=redirect,
                                assert_same_host=assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, **response_kw)

        # Check if we should retry the HTTP response.
        if retries.is_forced_retry(method, status_code=response.status):
            retries = retries.increment(method, url, response=response, _pool=self)
            retries.sleep()
            log.info("Forced retry: %s" % url)
            return self.urlopen(method, url, body, headers,
                                retries=retries, redirect=redirect,
                                assert_same_host=assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, **response_kw)

        return response

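# Illustrative sketch (not part of the upstream module): ``urlopen()`` above
# accepts either plain numbers or ``Retry``/``Timeout`` objects for its
# ``retries`` and ``timeout`` arguments. The host and values below are
# placeholders chosen for the example:
#
#     pool = HTTPConnectionPool('example.org', maxsize=4, block=True)
#     response = pool.urlopen(
#         'GET', '/',
#         retries=Retry(total=3, redirect=2),
#         timeout=Timeout(connect=2.0, read=5.0))
#     print(response.status, len(response.data))
#
# Passing ``retries=False`` disables retries entirely and returns redirect
# responses instead of following them, as documented in the docstring above.
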
class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    When Python is compiled with the :mod:`ssl` module, then
    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
    instead of :class:`.HTTPSConnection`.

    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
    available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = 'https'
    ConnectionCls = HTTPSConnection

    def __init__(self, host, port=None,
                 strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
                 block=False, headers=None, retries=None,
                 _proxy=None, _proxy_headers=None,
                 key_file=None, cert_file=None, cert_reqs=None,
                 ca_certs=None, ssl_version=None,
                 assert_hostname=None, assert_fingerprint=None,
                 ca_cert_dir=None, **conn_kw):

        HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
                                    block, headers, retries, _proxy, _proxy_headers,
                                    **conn_kw)

        if ca_certs and cert_reqs is None:
            cert_reqs = 'CERT_REQUIRED'

        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_conn(self, conn):
        """
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        """

        if isinstance(conn, VerifiedHTTPSConnection):
            conn.set_cert(key_file=self.key_file,
                          cert_file=self.cert_file,
                          cert_reqs=self.cert_reqs,
                          ca_certs=self.ca_certs,
                          ca_cert_dir=self.ca_cert_dir,
                          assert_hostname=self.assert_hostname,
                          assert_fingerprint=self.assert_fingerprint)
            conn.ssl_version = self.ssl_version

        return conn

    def _prepare_proxy(self, conn):
        """
        Establish tunnel connection early, because otherwise httplib
        would improperly set Host: header to proxy's IP:port.
        """
        # Python 2.7+
        try:
            set_tunnel = conn.set_tunnel
        except AttributeError:  # Platform-specific: Python 2.6
            set_tunnel = conn._set_tunnel

        if sys.version_info <= (2, 6, 4) and not self.proxy_headers:  # Python 2.6.4 and older
            set_tunnel(self.host, self.port)
        else:
            set_tunnel(self.host, self.port, self.proxy_headers)

        conn.connect()

    def _new_conn(self):
        """
        Return a fresh :class:`httplib.HTTPSConnection`.
        """
        self.num_connections += 1
        log.info("Starting new HTTPS connection (%d): %s"
                 % (self.num_connections, self.host))

        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
            raise SSLError("Can't connect to HTTPS URL because the SSL "
                           "module is not available.")

        actual_host = self.host
        actual_port = self.port
        if self.proxy is not None:
            actual_host = self.proxy.host
            actual_port = self.proxy.port

        conn = self.ConnectionCls(host=actual_host, port=actual_port,
                                  timeout=self.timeout.connect_timeout,
                                  strict=self.strict, **self.conn_kw)

        return self._prepare_conn(conn)

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if not getattr(conn, 'sock', None):  # AppEngine might not have `.sock`
            conn.connect()

        if not conn.is_verified:
            warnings.warn((
                'Unverified HTTPS request is being made. '
                'Adding certificate verification is strongly advised. See: '
                'https://urllib3.readthedocs.org/en/latest/security.html'),
                InsecureRequestWarning)

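# Illustrative sketch (not part of the upstream module): to get verified HTTPS
# connections, pass ``cert_reqs`` and a CA bundle; the host and bundle path
# below are placeholders.
#
#     https_pool = HTTPSConnectionPool(
#         'example.org', port=443,
#         cert_reqs='CERT_REQUIRED',
#         ca_certs='/path/to/ca_bundle.pem')
#     response = https_pool.request('GET', '/')
#
# Without ``cert_reqs``/``ca_certs``, the certificate is not verified and
# ``_validate_conn`` above emits an ``InsecureRequestWarning``.
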
def connection_from_url(url, **kw):
    """
    Given a url, return a :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating a :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, host, port = get_host(url)
    if scheme == 'https':
        return HTTPSConnectionPool(host, port=port, **kw)
    else:
        return HTTPConnectionPool(host, port=port, **kw)
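

# Minimal usage demo (illustrative, not part of upstream urllib3). It is
# guarded so importing this module is unaffected; the URL is a placeholder
# and running it requires network access.
if __name__ == '__main__':
    demo_pool = connection_from_url('http://example.org/', maxsize=1, timeout=5.0)
    try:
        # request() comes from RequestMethods; data is the preloaded body.
        demo_response = demo_pool.request('GET', '/')
        print('%s %s bytes' % (demo_response.status, len(demo_response.data)))
    finally:
        demo_pool.close()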